Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- Spark Command: /opt/jdk1.8.0_112/bin/java -cp /etc/dse/spark/:/usr/share/dse/spark/jars/*:/etc/dse/hadoop2-client/ -Djava.library.path=/usr/share/dse/hadoop2-client/lib/native:/usr/share/dse/cassandra/lib/sigar-bin: -Dcassandra.logdir=/var/log/cassandra -XX:MaxHeapFreeRatio=50 -XX:MinHeapFreeRatio=20 -Dguice_include_stack_traces=OFF -Ddse.system_memory_in_mb=32174 -Dcassandra.config.loader=com.datastax.bdp.config.DseConfigurationLoader -Dlogback.configurationFile=/etc/dse/spark/logback-spark.xml -Dcassandra.logdir=/var/log/cassandra -Ddse.client.configuration.impl=com.datastax.bdp.transport.client.HadoopBasedClientConfiguration -Dderby.stream.error.method=com.datastax.bdp.derby.LogbackBridge.getLogger -Xmx1024M org.apache.spark.deploy.SparkSubmit --class org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 spark-internal
- ========================================
- WARN 2017-05-19 14:11:31 org.apache.spark.SparkContext: Use an existing SparkContext, some configuration may not take effect.
- WARN 2017-05-19 14:11:36 org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: Unexpected exception caught.
- javax.jdo.JDOFatalInternalException: Unexpected exception caught.
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701) ~[jdo-api-3.0.1.jar:3.0.1]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.8.0_112]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) [na:1.8.0_112]
- at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.8.0_112]
- at java.lang.reflect.Constructor.newInstance(Constructor.java:423) [na:1.8.0_112]
- at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:247) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.HiveUtils$.newClientForExecution(HiveUtils.scala:250) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.main(HiveThriftServer2.scala:88) [spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(HiveThriftServer2.scala) [spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at org.apache.spark.deploy.DseSparkSubmit$.org$apache$spark$deploy$DseSparkSubmit$$runMain(DseSparkSubmit.scala:730) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.doRunMain$1(DseSparkSubmit.scala:175) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.submit(DseSparkSubmit.scala:200) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.main(DseSparkSubmit.scala:109) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper$.main(DseSparkSubmitBootstrapper.scala:74) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper.main(DseSparkSubmitBootstrapper.scala) [dse-spark-5.1.0.jar:5.1.0]
- Caused by: java.lang.reflect.InvocationTargetException: null
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965) ~[jdo-api-3.0.1.jar:3.0.1]
- at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166) ~[jdo-api-3.0.1.jar:3.0.1]
- ... 48 common frames omitted
- Caused by: java.lang.NoClassDefFoundError: org/apache/log4j/or/RendererMap
- at org.apache.log4j.Hierarchy.<init>(Hierarchy.java:97) ~[apache-log4j-extras-1.2.17.jar:na]
- at org.apache.log4j.LogManager.<clinit>(LogManager.java:82) ~[apache-log4j-extras-1.2.17.jar:na]
- at org.apache.log4j.Logger.getLogger(Logger.java:104) ~[apache-log4j-extras-1.2.17.jar:na]
- at org.datanucleus.util.Log4JLogger.<init>(Log4JLogger.java:49) ~[datanucleus-core-3.2.10.jar:na]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.8.0_112]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) [na:1.8.0_112]
- at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.8.0_112]
- at java.lang.reflect.Constructor.newInstance(Constructor.java:423) [na:1.8.0_112]
- at org.datanucleus.util.NucleusLogger.getLoggerInstance(NucleusLogger.java:237) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.util.NucleusLogger.<clinit>(NucleusLogger.java:205) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginRegistryFactory.newPluginRegistry(PluginRegistryFactory.java:74) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.<init>(PluginManager.java:61) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.createPluginManager(PluginManager.java:427) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:266) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:247) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:225) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.<init>(JDOPersistenceManagerFactory.java:416) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:301) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202) ~[datanucleus-api-jdo-3.2.6.jar:na]
- ... 56 common frames omitted
- Caused by: java.lang.ClassNotFoundException: org.apache.log4j.or.RendererMap
- at java.net.URLClassLoader.findClass(URLClassLoader.java:381) ~[na:1.8.0_112]
- at java.lang.ClassLoader.loadClass(ClassLoader.java:424) ~[na:1.8.0_112]
- at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) ~[na:1.8.0_112]
- at java.lang.ClassLoader.loadClass(ClassLoader.java:357) ~[na:1.8.0_112]
- ... 75 common frames omitted
- WARN 2017-05-19 14:11:36 hive.ql.metadata.Hive: Failed to access metastore. This class should not be accessed at runtime.
- org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
- at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1236) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:247) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.HiveUtils$.newClientForExecution(HiveUtils.scala:250) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.main(HiveThriftServer2.scala:88) [spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(HiveThriftServer2.scala) [spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at org.apache.spark.deploy.DseSparkSubmit$.org$apache$spark$deploy$DseSparkSubmit$$runMain(DseSparkSubmit.scala:730) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.doRunMain$1(DseSparkSubmit.scala:175) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.submit(DseSparkSubmit.scala:200) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.main(DseSparkSubmit.scala:109) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper$.main(DseSparkSubmitBootstrapper.scala:74) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper.main(DseSparkSubmitBootstrapper.scala) [dse-spark-5.1.0.jar:5.1.0]
- Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
- at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- ... 18 common frames omitted
- Caused by: java.lang.reflect.InvocationTargetException: null
- at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_112]
- at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[na:1.8.0_112]
- at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- ... 24 common frames omitted
- Caused by: javax.jdo.JDOFatalInternalException: Unexpected exception caught.
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701) ~[jdo-api-3.0.1.jar:3.0.1]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- ... 29 common frames omitted
- Caused by: java.lang.reflect.InvocationTargetException: null
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965) ~[jdo-api-3.0.1.jar:3.0.1]
- at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166) ~[jdo-api-3.0.1.jar:3.0.1]
- ... 48 common frames omitted
- Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.datanucleus.util.NucleusLogger
- at org.datanucleus.plugin.PluginRegistryFactory.newPluginRegistry(PluginRegistryFactory.java:74) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.<init>(PluginManager.java:61) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.createPluginManager(PluginManager.java:427) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:266) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:247) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:225) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.<init>(JDOPersistenceManagerFactory.java:416) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:301) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202) ~[datanucleus-api-jdo-3.2.6.jar:na]
- ... 56 common frames omitted
- WARN 2017-05-19 14:11:36 org.apache.hadoop.hive.metastore.HiveMetaStore: Retrying creating default database after error: Unexpected exception caught.
- javax.jdo.JDOFatalInternalException: Unexpected exception caught.
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701) ~[jdo-api-3.0.1.jar:3.0.1]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.8.0_112]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) [na:1.8.0_112]
- at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.8.0_112]
- at java.lang.reflect.Constructor.newInstance(Constructor.java:423) [na:1.8.0_112]
- at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) [hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) [hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:247) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.HiveUtils$.newClientForExecution(HiveUtils.scala:250) [spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.main(HiveThriftServer2.scala:88) [spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(HiveThriftServer2.scala) [spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at org.apache.spark.deploy.DseSparkSubmit$.org$apache$spark$deploy$DseSparkSubmit$$runMain(DseSparkSubmit.scala:730) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.doRunMain$1(DseSparkSubmit.scala:175) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.submit(DseSparkSubmit.scala:200) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.main(DseSparkSubmit.scala:109) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper$.main(DseSparkSubmitBootstrapper.scala:74) [dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper.main(DseSparkSubmitBootstrapper.scala) [dse-spark-5.1.0.jar:5.1.0]
- Caused by: java.lang.reflect.InvocationTargetException: null
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965) ~[jdo-api-3.0.1.jar:3.0.1]
- at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166) ~[jdo-api-3.0.1.jar:3.0.1]
- ... 45 common frames omitted
- Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.datanucleus.util.NucleusLogger
- at org.datanucleus.plugin.PluginRegistryFactory.newPluginRegistry(PluginRegistryFactory.java:74) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.<init>(PluginManager.java:61) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.createPluginManager(PluginManager.java:427) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:266) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:247) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:225) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.<init>(JDOPersistenceManagerFactory.java:416) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:301) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202) ~[datanucleus-api-jdo-3.2.6.jar:na]
- ... 53 common frames omitted
- ERROR 2017-05-19 14:11:36 org.apache.spark.deploy.DseSparkSubmitBootstrapper: Failed to start or submit Spark application
- java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
- at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:189) ~[spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:247) ~[spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.HiveUtils$.newClientForExecution(HiveUtils.scala:250) ~[spark-hive_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.main(HiveThriftServer2.scala:88) ~[spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(HiveThriftServer2.scala) ~[spark-hive-thriftserver_2.11-2.0.2.6.jar:2.0.2.6]
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at org.apache.spark.deploy.DseSparkSubmit$.org$apache$spark$deploy$DseSparkSubmit$$runMain(DseSparkSubmit.scala:730) ~[dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.doRunMain$1(DseSparkSubmit.scala:175) ~[dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.submit(DseSparkSubmit.scala:200) ~[dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmit$.main(DseSparkSubmit.scala:109) ~[dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper$.main(DseSparkSubmitBootstrapper.scala:74) ~[dse-spark-5.1.0.jar:5.1.0]
- at org.apache.spark.deploy.DseSparkSubmitBootstrapper.main(DseSparkSubmitBootstrapper.scala) [dse-spark-5.1.0.jar:5.1.0]
- Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
- at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- ... 15 common frames omitted
- Caused by: java.lang.reflect.InvocationTargetException: null
- at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_112]
- at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[na:1.8.0_112]
- at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- ... 21 common frames omitted
- Caused by: javax.jdo.JDOFatalInternalException: Unexpected exception caught.
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1193) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701) ~[jdo-api-3.0.1.jar:3.0.1]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) ~[hadoop-common-2.7.1.3.jar:na]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:624) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) ~[hive-metastore-1.2.1.spark2.jar:1.2.1.spark2]
- at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) ~[hive-exec-1.2.1.spark2.jar:1.2.1.spark2]
- ... 26 common frames omitted
- Caused by: java.lang.reflect.InvocationTargetException: null
- at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112]
- at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112]
- at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112]
- at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965) ~[jdo-api-3.0.1.jar:3.0.1]
- at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112]
- at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960) ~[jdo-api-3.0.1.jar:3.0.1]
- at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166) ~[jdo-api-3.0.1.jar:3.0.1]
- ... 45 common frames omitted
- Caused by: java.lang.NoClassDefFoundError: Could not initialize class org.datanucleus.util.NucleusLogger
- at org.datanucleus.plugin.PluginRegistryFactory.newPluginRegistry(PluginRegistryFactory.java:74) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.<init>(PluginManager.java:61) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.plugin.PluginManager.createPluginManager(PluginManager.java:427) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:266) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:247) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.NucleusContext.<init>(NucleusContext.java:225) ~[datanucleus-core-3.2.10.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.<init>(JDOPersistenceManagerFactory.java:416) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:301) ~[datanucleus-api-jdo-3.2.6.jar:na]
- at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202) ~[datanucleus-api-jdo-3.2.6.jar:na]
- ... 53 common frames omitted
Add Comment
Please, Sign In to add comment