2019-03-25 11:26:31 WARN Hive:168 - Failed to access metastore. This class should not accessed in runtime.
org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1236)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:183)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:117)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:272)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:384)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:286)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:215)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:215)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:215)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:214)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.org$apache$spark$sql$hive$HiveSessionStateBuilder$$externalCatalog(HiveSessionStateBuilder.scala:39)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$1.apply(HiveSessionStateBuilder.scala:54)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$1.apply(HiveSessionStateBuilder.scala:54)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:90)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:90)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.tableExists(SessionCatalog.scala:415)
    at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:405)
    at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:400)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    ... 40 more
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    ... 46 more
Caused by: java.lang.NullPointerException
    at org.apache.thrift.transport.TSocket.open(TSocket.java:209)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:420)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:236)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    ... 51 more
Traceback (most recent call last):
  File "/usr/local/spark/python/pyspark/sql/utils.py", line 63, in deco
    return f(*a, **kw)
  File "/usr/local/spark/python/lib/py4j-0.10.7-src.zip/py4j/protocol.py", line 328, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling o47.saveAsTable.
: org.apache.spark.sql.AnalysisException: java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient;
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:214)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.org$apache$spark$sql$hive$HiveSessionStateBuilder$$externalCatalog(HiveSessionStateBuilder.scala:39)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$1.apply(HiveSessionStateBuilder.scala:54)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anonfun$1.apply(HiveSessionStateBuilder.scala:54)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:90)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:90)
    at org.apache.spark.sql.catalyst.catalog.SessionCatalog.tableExists(SessionCatalog.scala:415)
    at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:405)
    at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:400)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
    at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:183)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:117)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:272)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:384)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:286)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:215)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:215)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:215)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
    ... 22 more
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    ... 37 more
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    ... 43 more
Caused by: java.lang.NullPointerException
    at org.apache.thrift.transport.TSocket.open(TSocket.java:209)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:420)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:236)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    ... 48 more

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "<stdin>", line 2, in <module>
  File "/usr/local/spark/python/pyspark/sql/readwriter.py", line 775, in saveAsTable
    self._jwrite.saveAsTable(name)
  File "/usr/local/spark/python/lib/py4j-0.10.7-src.zip/py4j/java_gateway.py", line 1257, in __call__
  File "/usr/local/spark/python/pyspark/sql/utils.py", line 69, in deco
    raise AnalysisException(s.split(': ', 1)[1], stackTrace)
pyspark.sql.utils.AnalysisException: 'java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient;'
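
The root cause in the trace is a NullPointerException from org.apache.thrift.transport.TSocket.open while HiveMetaStoreClient is being constructed, which PySpark then surfaces as the AnalysisException wrapping saveAsTable. Below is a minimal, hypothetical sketch of the kind of PySpark call path that produces this trace; the app name, table name, and the thrift://metastore-host:9083 URI are illustrative assumptions rather than values from this paste, and a misconfigured or unreachable hive.metastore.uris is only one plausible trigger.

# Hypothetical repro sketch (assumptions, not from the paste): a Hive-enabled
# SparkSession whose metastore URI is wrong or unreachable can fail inside
# HiveMetaStoreClient.open / TSocket.open when saveAsTable first touches the catalog.
from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .appName("saveAsTable-repro")                                   # placeholder app name
    .config("hive.metastore.uris", "thrift://metastore-host:9083")  # placeholder metastore URI
    .enableHiveSupport()
    .getOrCreate()
)

df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "value"])
df.write.saveAsTable("demo_table")  # corresponds to the o47.saveAsTable call in the trace

If the metastore is actually reachable and hive-site.xml is on the driver classpath, the same saveAsTable call should succeed, so verifying that hive.metastore.uris points at a running Thrift metastore service is a reasonable first check; the paste itself does not show the cluster configuration.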