20/08/11 12:12:03 WARN metastore: set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.
org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:425)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:321)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:225)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:4247)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:4233)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:247)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
20/08/11 12:12:03 INFO metastore: Connected to metastore.
20/08/11 12:12:03 WARN RetryingMetaStoreClient: MetaStoreClient lost connection. Attempting to reconnect (1 of 1) after 1s. getAllFunctions
org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:425)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:321)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:225)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_all_functions(ThriftHiveMetastore.java:3845)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_all_functions(ThriftHiveMetastore.java:3833)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getAllFunctions(HiveMetaStoreClient.java:2399)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
20/08/11 12:12:04 WARN TIOStreamTransport: Error closing output stream.
java.net.SocketException: Socket closed
    at java.net.SocketOutputStream.socketWrite(SocketOutputStream.java:118)
    at java.net.SocketOutputStream.write(SocketOutputStream.java:155)
    at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
    at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
    at java.io.FilterOutputStream.close(FilterOutputStream.java:158)
    at org.apache.thrift.transport.TIOStreamTransport.close(TIOStreamTransport.java:110)
    at org.apache.thrift.transport.TSocket.close(TSocket.java:235)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.close(HiveMetaStoreClient.java:563)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.reconnect(HiveMetaStoreClient.java:335)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:163)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
20/08/11 12:12:04 INFO metastore: Closed a connection to metastore, current connections: 0
20/08/11 12:12:04 INFO metastore: Trying to connect to metastore with URI thrift://10.32.74.254:9083
20/08/11 12:12:04 INFO metastore: Opened a connection to metastore, current connections: 1
20/08/11 12:12:04 WARN metastore: set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.
org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:425)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:321)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:225)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:4247)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:4233)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.reconnect(HiveMetaStoreClient.java:340)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:163)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
20/08/11 12:12:04 INFO metastore: Connected to metastore.
20/08/11 12:12:04 WARN Hive: Failed to register all functions.
org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.transport.TTransportException
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3897)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:425)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:321)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:225)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_all_functions(ThriftHiveMetastore.java:3845)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_all_functions(ThriftHiveMetastore.java:3833)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getAllFunctions(HiveMetaStoreClient.java:2399)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    ... 47 more
20/08/11 12:12:04 WARN HiveClientImpl: HiveClient got thrift exception, destroying client and retrying (0 tries remaining)
org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.transport.TTransportException
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.transport.TTransportException
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3897)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    ... 45 more
Caused by: org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:425)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:321)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:225)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_all_functions(ThriftHiveMetastore.java:3845)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_all_functions(ThriftHiveMetastore.java:3833)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getAllFunctions(HiveMetaStoreClient.java:2399)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    ... 47 more
20/08/11 12:12:05 WARN HiveClientImpl: Deadline exceeded
Traceback (most recent call last):
  File "/var/lib/spark/python/lib/pyspark.zip/pyspark/sql/utils.py", line 98, in deco
  File "/var/lib/spark/python/lib/py4j-0.10.8.1-src.zip/py4j/protocol.py", line 328, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling o53.sql.
: org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.transport.TTransportException;
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:109)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:221)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:154)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:144)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:40)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$resourceLoader$1(HiveSessionStateBuilder.scala:47)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client$lzycompute(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.client(HiveSessionStateBuilder.scala:115)
    at org.apache.spark.sql.hive.HiveSessionResourceLoader.addJar(HiveSessionStateBuilder.scala:117)
    at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
    at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
    at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:226)
    at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3472)
    at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
    at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
    at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
    at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3468)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:226)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:607)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:238)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.transport.TTransportException
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.client(HiveClientImpl.scala:288)
    at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:314)
    at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:255)
    at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:254)
    at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:304)
    at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:413)
    at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:221)
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
    ... 32 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.thrift.transport.TTransportException
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3897)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    ... 45 more
Caused by: org.apache.thrift.transport.TTransportException
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:425)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:321)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:225)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_all_functions(ThriftHiveMetastore.java:3845)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_all_functions(ThriftHiveMetastore.java:3833)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getAllFunctions(HiveMetaStoreClient.java:2399)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
    at com.sun.proxy.$Proxy32.getAllFunctions(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    ... 47 more
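
All of the traces above share one call path: a PySpark spark.sql() call runs AddJarCommand, which lazily initializes the Hive external catalog, and the resulting getAllFunctions call to the metastore at thrift://10.32.74.254:9083 fails with TTransportException; the repeated "set_ugi() not successful, Likely cause: new client talking to old server" warning points at a Hive client / metastore version mismatch. Below is a minimal PySpark sketch of that call path together with one common mitigation (pinning the Hive metastore client version); the jar path and the pinned version are illustrative assumptions, not values taken from the log.

# Minimal sketch, assuming only the metastore URI seen in the log;
# the jar path and the pinned metastore client version are placeholders.
from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .appName("add-jar-repro")
    .enableHiveSupport()
    # Metastore URI from the log ("Trying to connect to metastore with URI ...").
    .config("hive.metastore.uris", "thrift://10.32.74.254:9083")
    # The set_ugi() warning suggests the bundled Hive client is newer than the
    # metastore server; pinning spark.sql.hive.metastore.version to the server's
    # version is one common mitigation. "1.2.1" is only an example value here.
    .config("spark.sql.hive.metastore.version", "1.2.1")
    .config("spark.sql.hive.metastore.jars", "maven")
    .getOrCreate()
)

# The statement whose execution produced the AddJarCommand /
# HiveSessionResourceLoader.addJar frames in the traces above.
spark.sql("ADD JAR /path/to/udf.jar")  # illustrative path

Whether pinning the client version actually clears the TTransportException depends on the metastore server's real version and on network reachability, neither of which appears in the log.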