AsynchronousException{java.lang.Exception: Could not materialize checkpoint 3 for operator Source: Custom Source (1/1).}
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:948)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.Exception: Could not materialize checkpoint 3 for operator Source: Custom Source (1/1).
    ... 6 more
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle
    at java.util.concurrent.FutureTask.report(FutureTask.java:122)
    at java.util.concurrent.FutureTask.get(FutureTask.java:192)
    at org.apache.flink.util.FutureUtil.runIfNotDoneAndGet(FutureUtil.java:43)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:892)
    ... 5 more
Suppressed: java.lang.Exception: Could not properly cancel managed operator state future.
    at org.apache.flink.streaming.api.operators.OperatorSnapshotResult.cancel(OperatorSnapshotResult.java:99)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.cleanup(StreamTask.java:976)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:939)
    ... 5 more
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle
    at java.util.concurrent.FutureTask.report(FutureTask.java:122)
    at java.util.concurrent.FutureTask.get(FutureTask.java:192)
    at org.apache.flink.util.FutureUtil.runIfNotDoneAndGet(FutureUtil.java:43)
    at org.apache.flink.runtime.state.StateUtil.discardStateFuture(StateUtil.java:66)
    at org.apache.flink.streaming.api.operators.OperatorSnapshotResult.cancel(OperatorSnapshotResult.java:97)
    ... 7 more
Caused by: java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle
    at org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory$FsCheckpointStateOutputStream.closeAndGetHandle(FsCheckpointStreamFactory.java:336)
    at org.apache.flink.runtime.state.DefaultOperatorStateBackend$1.performOperation(DefaultOperatorStateBackend.java:300)
    at org.apache.flink.runtime.state.DefaultOperatorStateBackend$1.performOperation(DefaultOperatorStateBackend.java:230)
    at org.apache.flink.runtime.io.async.AbstractAsyncCallableWithResources.call(AbstractAsyncCallableWithResources.java:75)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at org.apache.flink.util.FutureUtil.runIfNotDoneAndGet(FutureUtil.java:40)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:892)
    ... 5 more
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 could only be replicated to 0 nodes instead of minReplication (=1). There are 1 datanode(s) running and no node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1481)
    at org.apache.hadoop.ipc.Client.call(Client.java:1427)
    at org.apache.hadoop.ipc.Client.call(Client.java:1337)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
    at com.sun.proxy.$Proxy13.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:440)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:398)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:335)
    at com.sun.proxy.$Proxy14.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1733)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1536)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:658)
[CIRCULAR REFERENCE:java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle]
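
The job that produced this trace is not included in the paste, and the checkpoint directory may just as well be configured in flink-conf.yaml as in code. As a minimal sketch only, assuming a job on a Flink 1.4-era API that checkpoints to the HDFS path visible above, it could look roughly like the following; the class name and the source are hypothetical stand-ins for the "Custom Source (1/1)" named in the trace, and only the checkpoint URI hdfs://namenode:8020/flink/checkpoints is taken from the paste.

import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

public class CheckpointToHdfsJob {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Snapshot state periodically; "checkpoint 3" in the trace is simply the
        // third such attempt. The interval here is an arbitrary example value.
        env.enableCheckpointing(10_000L);

        // Write checkpoint data to the HDFS directory that appears in the trace.
        env.setStateBackend(new FsStateBackend("hdfs://namenode:8020/flink/checkpoints"));

        // Hypothetical stand-in for the "Custom Source (1/1)" named in the trace.
        env.addSource(new SourceFunction<Long>() {
            private volatile boolean running = true;

            @Override
            public void run(SourceContext<Long> ctx) throws Exception {
                long counter = 0L;
                while (running) {
                    // Emit under the checkpoint lock so records and snapshots
                    // do not interleave.
                    synchronized (ctx.getCheckpointLock()) {
                        ctx.collect(counter++);
                    }
                    Thread.sleep(100L);
                }
            }

            @Override
            public void cancel() {
                running = false;
            }
        }).print();

        env.execute("checkpoint-to-hdfs example");
    }
}

With such a setup, the failure above occurs in the asynchronous part of the snapshot: FsCheckpointStateOutputStream.closeAndGetHandle tries to flush the source's operator state file to HDFS and cannot, so the whole checkpoint 3 is discarded.
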
Source: Custom Source (1/1)
Timestamp: 2018-01-17, 12:03:40 Location: 8898d49999cf:33727
AsynchronousException{java.lang.Exception: Could not materialize checkpoint 3 for operator Source: Custom Source (1/1).}
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:948)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.Exception: Could not materialize checkpoint 3 for operator Source: Custom Source (1/1).
    ... 6 more
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle
    at java.util.concurrent.FutureTask.report(FutureTask.java:122)
    at java.util.concurrent.FutureTask.get(FutureTask.java:192)
    at org.apache.flink.util.FutureUtil.runIfNotDoneAndGet(FutureUtil.java:43)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:892)
    ... 5 more
Suppressed: java.lang.Exception: Could not properly cancel managed operator state future.
    at org.apache.flink.streaming.api.operators.OperatorSnapshotResult.cancel(OperatorSnapshotResult.java:99)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.cleanup(StreamTask.java:976)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:939)
    ... 5 more
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle
    at java.util.concurrent.FutureTask.report(FutureTask.java:122)
    at java.util.concurrent.FutureTask.get(FutureTask.java:192)
    at org.apache.flink.util.FutureUtil.runIfNotDoneAndGet(FutureUtil.java:43)
    at org.apache.flink.runtime.state.StateUtil.discardStateFuture(StateUtil.java:66)
    at org.apache.flink.streaming.api.operators.OperatorSnapshotResult.cancel(OperatorSnapshotResult.java:97)
    ... 7 more
Caused by: java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle
    at org.apache.flink.runtime.state.filesystem.FsCheckpointStreamFactory$FsCheckpointStateOutputStream.closeAndGetHandle(FsCheckpointStreamFactory.java:336)
    at org.apache.flink.runtime.state.DefaultOperatorStateBackend$1.performOperation(DefaultOperatorStateBackend.java:300)
    at org.apache.flink.runtime.state.DefaultOperatorStateBackend$1.performOperation(DefaultOperatorStateBackend.java:230)
    at org.apache.flink.runtime.io.async.AbstractAsyncCallableWithResources.call(AbstractAsyncCallableWithResources.java:75)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at org.apache.flink.util.FutureUtil.runIfNotDoneAndGet(FutureUtil.java:40)
    at org.apache.flink.streaming.runtime.tasks.StreamTask$AsyncCheckpointRunnable.run(StreamTask.java:892)
    ... 5 more
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 could only be replicated to 0 nodes instead of minReplication (=1). There are 1 datanode(s) running and no node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1481)
    at org.apache.hadoop.ipc.Client.call(Client.java:1427)
    at org.apache.hadoop.ipc.Client.call(Client.java:1337)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
    at com.sun.proxy.$Proxy13.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:440)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:398)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:335)
    at com.sun.proxy.$Proxy14.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1733)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1536)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:658)
[CIRCULAR REFERENCE:java.io.IOException: Could not flush and close the file system output stream to hdfs://namenode:8020/flink/checkpoints/a6579ec75602a754b0d604f8429f1a6b/chk-3/7440f364-9160-41dd-bcd7-e72b4eae6324 in order to obtain the stream state handle]
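
The innermost cause in both reports is not a Flink error but an HDFS one: the namenode reports that the checkpoint file could only be replicated to 0 nodes even though 1 datanode is running, which commonly indicates that the datanode is unreachable from the writer (for example, hostname resolution or network configuration between containers) or has no usable disk capacity. As a rough diagnostic sketch, the hypothetical Hadoop client probe below (class name and probe path are made up; only the namenode URI comes from the trace) writes a small file against the same namenode to check whether plain HDFS writes fail in the same way, independently of Flink.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsWriteProbe {

    public static void main(String[] args) throws Exception {
        // Connect to the same namenode that the checkpoint path uses.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"), conf);

        // Create a small file and force it out to the datanodes. If no datanode
        // can accept the block, this fails with the same
        // "could only be replicated to 0 nodes" RemoteException as the checkpoint.
        Path probe = new Path("/tmp/hdfs-write-probe");
        try (FSDataOutputStream out = fs.create(probe, true)) {
            out.writeUTF("probe");
            out.hflush();
        }

        System.out.println("Write succeeded; deleted probe file: " + fs.delete(probe, false));
    }
}

If the probe fails with the same RemoteException, the checkpoint failure is most likely an HDFS-side issue rather than anything in the Flink job or its checkpoint configuration.
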