java.security.AccessControlException: access denied org.apache.derby.security.SystemPermission( "engine", "usederbyinternals" )
        at java.security.AccessControlContext.checkPermission(AccessControlContext.java:472)
        at java.security.AccessController.checkPermission(AccessController.java:884)
        at org.apache.derby.iapi.security.SecurityUtil.checkDerbyInternalsPrivilege(Unknown Source)
        at org.apache.derby.iapi.services.monitor.Monitor.startMonitor(Unknown Source)
        at org.apache.derby.iapi.jdbc.JDBCBoot$1.run(Unknown Source)
        at java.security.AccessController.doPrivileged(Native Method)
        at org.apache.derby.iapi.jdbc.JDBCBoot.boot(Unknown Source)
        at org.apache.derby.iapi.jdbc.JDBCBoot.boot(Unknown Source)
        at org.apache.derby.jdbc.EmbeddedDriver.boot(Unknown Source)
        at org.apache.derby.jdbc.EmbeddedDriver.<clinit>(Unknown Source)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at java.lang.Class.newInstance(Class.java:442)
        at org.datanucleus.store.rdbms.connectionpool.AbstractConnectionPoolFactory.loadDriver(AbstractConnectionPoolFactory.java:47)
        at org.datanucleus.store.rdbms.connectionpool.BoneCPConnectionPoolFactory.createConnectionPool(BoneCPConnectionPoolFactory.java:54)
        at org.datanucleus.store.rdbms.ConnectionFactoryImpl.generateDataSources(ConnectionFactoryImpl.java:238)
        at org.datanucleus.store.rdbms.ConnectionFactoryImpl.initialiseDataSources(ConnectionFactoryImpl.java:131)
        at org.datanucleus.store.rdbms.ConnectionFactoryImpl.<init>(ConnectionFactoryImpl.java:85)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
        at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:325)
        at org.datanucleus.store.AbstractStoreManager.registerConnectionFactory(AbstractStoreManager.java:282)
        at org.datanucleus.store.AbstractStoreManager.<init>(AbstractStoreManager.java:240)
        at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:286)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
        at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
        at org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
        at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
        at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
        at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
        at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
        at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
        at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
        at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
        at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365)
        at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394)
        at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291)
        at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
        at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
        at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
        at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
        at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
        at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
        at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
        at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
        at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
        at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
        at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
        at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
        at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
        at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
        at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
        at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
        at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
        at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
        at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
        at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1059)
        at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:137)
        at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:136)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:136)
        at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:133)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:66)
        at org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:587)
        at org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:344)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec$$anonfun$1$$anonfun$apply$mcV$sp$1$$anonfun$apply$mcV$sp$9.apply(InnerNormalizationStrategySpec.scala:82)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec$$anonfun$1$$anonfun$apply$mcV$sp$1$$anonfun$apply$mcV$sp$9.apply(InnerNormalizationStrategySpec.scala:78)
        at it.agilelab.bigdata.wasp.contacthistory.models.SparkSetup$class.withSparkSession(SparkSetup.scala:22)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec.withSparkSession(InnerNormalizationStrategySpec.scala:17)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(InnerNormalizationStrategySpec.scala:78)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(InnerNormalizationStrategySpec.scala:78)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(InnerNormalizationStrategySpec.scala:78)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.WordSpecLike$$anon$1.apply(WordSpecLike.scala:1078)
        at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196)
        at org.scalatest.WordSpec.withFixture(WordSpec.scala:1881)
        at org.scalatest.WordSpecLike$class.invokeWithFixture$1(WordSpecLike.scala:1075)
        at org.scalatest.WordSpecLike$$anonfun$runTest$1.apply(WordSpecLike.scala:1088)
        at org.scalatest.WordSpecLike$$anonfun$runTest$1.apply(WordSpecLike.scala:1088)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
        at org.scalatest.WordSpecLike$class.runTest(WordSpecLike.scala:1088)
        at org.scalatest.WordSpec.runTest(WordSpec.scala:1881)
        at org.scalatest.WordSpecLike$$anonfun$runTests$1.apply(WordSpecLike.scala:1147)
        at org.scalatest.WordSpecLike$$anonfun$runTests$1.apply(WordSpecLike.scala:1147)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
        at scala.collection.immutable.List.foreach(List.scala:392)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:373)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:410)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
        at scala.collection.immutable.List.foreach(List.scala:392)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
        at org.scalatest.WordSpecLike$class.runTests(WordSpecLike.scala:1147)
        at org.scalatest.WordSpec.runTests(WordSpec.scala:1881)
        at org.scalatest.Suite$class.run(Suite.scala:1147)
        at org.scalatest.WordSpec.org$scalatest$WordSpecLike$$super$run(WordSpec.scala:1881)
        at org.scalatest.WordSpecLike$$anonfun$run$1.apply(WordSpecLike.scala:1192)
        at org.scalatest.WordSpecLike$$anonfun$run$1.apply(WordSpecLike.scala:1192)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
        at org.scalatest.WordSpecLike$class.run(WordSpecLike.scala:1192)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec.org$scalatest$BeforeAndAfterAll$$super$run(InnerNormalizationStrategySpec.scala:17)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
        at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec.run(InnerNormalizationStrategySpec.scala:17)
        at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
        at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
        at sbt.TestRunner.runTest$1(TestFramework.scala:106)
        at sbt.TestRunner.run(TestFramework.scala:117)
        at sbt.TestFramework$$anon$2$$anonfun$$lessinit$greater$1.$anonfun$apply$1(TestFramework.scala:262)
        at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:233)
        at sbt.TestFramework$$anon$2$$anonfun$$lessinit$greater$1.apply(TestFramework.scala:262)
        at sbt.TestFramework$$anon$2$$anonfun$$lessinit$greater$1.apply(TestFramework.scala:262)
        at sbt.TestFunction.apply(TestFramework.scala:271)
        at sbt.Tests$.$anonfun$toTask$1(Tests.scala:281)
        at sbt.std.Transform$$anon$3.$anonfun$apply$2(System.scala:46)
        at sbt.std.Transform$$anon$4.work(System.scala:66)
        at sbt.Execute.$anonfun$submit$2(Execute.scala:262)
        at sbt.internal.util.ErrorHandling$.wideConvert(ErrorHandling.scala:16)
        at sbt.Execute.work(Execute.scala:271)
        at sbt.Execute.$anonfun$submit$1(Execute.scala:262)
        at sbt.ConcurrentRestrictions$$anon$4.$anonfun$submitValid$1(ConcurrentRestrictions.scala:174)
        at sbt.CompletionService$$anon$2.call(CompletionService.scala:36)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
- should be normalized without errors *** FAILED ***
[info]   java.lang.IllegalArgumentException: Error while instantiating 'org.apache.spark.sql.hive.HiveSessionStateBuilder':
[info]   at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1062)
[info]   at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:137)
[info]   at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:136)
[info]   at scala.Option.getOrElse(Option.scala:121)
[info]   at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:136)
[info]   at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:133)
[info]   at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:66)
[info]   at org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:587)
[info]   at org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:344)
[info]   at scala.it.agilelab.bigdata.wasp.contacthistory.consumers.spark.strategies.InnerNormalizationStrategySpec$$anonfun$1$$anonfun$apply$mcV$sp$1$$anonfun$apply$mcV$sp$9.apply(InnerNormalizationStrategySpec.scala:82)
[info]   ...
[info]   Cause: org.apache.spark.sql.AnalysisException: java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient;
[info]   at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106)
[info]   at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
[info]   at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
[info]   at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
[info]   at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
[info]   at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
[info]   at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
[info]   at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
[info]   at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
[info]   at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1059)
[info]   ...
[info]   Cause: java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
[info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
[info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
[info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
[info]   at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
[info]   at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
[info]   at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
[info]   ...
[info]   Cause: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
[info]   at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523)
[info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
[info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
[info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
[info]   at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
[info]   at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
[info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
[info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
[info]   ...
[info]   Cause: java.lang.reflect.InvocationTargetException:
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
[info]   at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
[info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
[info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
[info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
[info]   at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
[info]   at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
[info]   ...
[info]   Cause: javax.jdo.JDOFatalInternalException: Error creating transactional connection factory
[info]   at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:587)
[info]   at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:788)
[info]   at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
[info]   at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
[info]   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[info]   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
[info]   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[info]   at java.lang.reflect.Method.invoke(Method.java:498)
[info]   at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
[info]   at java.security.AccessController.doPrivileged(Native Method)
[info]   ...
[info]   Cause: java.lang.reflect.InvocationTargetException:
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
[info]   at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
[info]   at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:325)
[info]   at org.datanucleus.store.AbstractStoreManager.registerConnectionFactory(AbstractStoreManager.java:282)
[info]   at org.datanucleus.store.AbstractStoreManager.<init>(AbstractStoreManager.java:240)
[info]   at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:286)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
[info]   ...
[info]   Cause: java.lang.NoClassDefFoundError: Could not initialize class org.apache.derby.jdbc.EmbeddedDriver
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
[info]   at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
[info]   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
[info]   at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
[info]   at java.lang.Class.newInstance(Class.java:442)
[info]   at org.datanucleus.store.rdbms.connectionpool.AbstractConnectionPoolFactory.loadDriver(AbstractConnectionPoolFactory.java:47)
[info]   at org.datanucleus.store.rdbms.connectionpool.BoneCPConnectionPoolFactory.createConnectionPool(BoneCPConnectionPoolFactory.java:54)
[info]   at org.datanucleus.store.rdbms.ConnectionFactoryImpl.generateDataSources(ConnectionFactoryImpl.java:238)
[info]   at org.datanucleus.store.rdbms.ConnectionFactoryImpl.initialiseDataSources(ConnectionFactoryImpl.java:131)
[info]   at org.datanucleus.store.rdbms.ConnectionFactoryImpl.<init>(ConnectionFactoryImpl.java:85)
[info]   ...
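
Note: the root failure at the top of this trace is Derby's internals check, java.security.AccessControlException: access denied org.apache.derby.security.SystemPermission( "engine", "usederbyinternals" ), which is raised while the embedded Derby-backed Hive metastore boots under an active SecurityManager in the test JVM (the subsequent NoClassDefFoundError on EmbeddedDriver is a consequence of that failed static initialization). A commonly suggested workaround, sketched below under the assumption that a SecurityManager/Policy in the in-process sbt test run is what denies the permission, is to install a permissive Policy before the first SparkSession is created; PermissiveTestPolicy is a hypothetical helper name, not part of the original test code. Granting only the specific SystemPermission in the JVM's java.policy, or forking the tests (Test / fork := true in sbt) so they do not run under sbt's in-process security manager, are alternative approaches along the same lines.

import java.security.{AllPermission, Permission, PermissionCollection, Permissions, Policy, ProtectionDomain}

// Hypothetical, test-only Policy that grants everything, so Derby's
// SystemPermission("engine", "usederbyinternals") check no longer fails.
// This effectively disables security-policy checks; never use it outside tests.
object PermissiveTestPolicy extends Policy {
  override def getPermissions(domain: ProtectionDomain): PermissionCollection = {
    val permissions = new Permissions()
    permissions.add(new AllPermission())
    permissions
  }
  override def implies(domain: ProtectionDomain, permission: Permission): Boolean = true
}

// Install the policy before the embedded metastore boots, e.g. in the spec's
// beforeAll(), ahead of the first SparkSession / createDataFrame call.
Policy.setPolicy(PermissiveTestPolicy)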