Asafs-MBP:hadoop-2.7.2 asafchelouche$ bin/hdfs dfs -put etc/hadoop /input
16/05/15 17:09:11 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/capacity-scheduler.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

    at org.apache.hadoop.ipc.Client.call(Client.java:1475)
    at org.apache.hadoop.ipc.Client.call(Client.java:1412)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
    at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
    at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
    at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
put: File /input/hadoop/capacity-scheduler.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
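Aside: every file in this batch fails the same way because the NameNode cannot find a single live DataNode to place a block on. A minimal diagnostic sketch for a default single-node Hadoop 2.7.2 install follows; the log path/naming and the clusterID-mismatch explanation are assumptions, not something shown in this transcript.

# List running JVMs (jps ships with the JDK); a healthy pseudo-distributed
# cluster should show NameNode, DataNode and SecondaryNameNode here.
jps

# Ask the NameNode how many DataNodes have registered; this transcript implies
# it would show zero live DataNodes.
bin/hdfs dfsadmin -report

# If no DataNode is running, its log usually says why (assumed default log
# location and naming: logs/hadoop-<user>-datanode-<host>.log). A common cause
# on re-formatted single-node setups is a clusterID mismatch between the
# NameNode and the old DataNode storage directory.
tail -n 50 logs/hadoop-*-datanode-*.log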
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/configuration.xsl._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/configuration.xsl._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/container-executor.cfg._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/container-executor.cfg._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/core-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/core-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/hadoop-env.cmd._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/hadoop-env.cmd._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/hadoop-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/hadoop-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/hadoop-metrics.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/hadoop-metrics.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/hadoop-metrics2.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/hadoop-metrics2.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/hadoop-policy.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/hadoop-policy.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/hdfs-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/hdfs-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/httpfs-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/httpfs-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/httpfs-log4j.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/httpfs-log4j.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/httpfs-signature.secret._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/httpfs-signature.secret._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/httpfs-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/httpfs-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/kms-acls.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/kms-acls.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/kms-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/kms-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:11 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/kms-log4j.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
put: File /input/hadoop/kms-log4j.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/kms-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  594. put: File /input/hadoop/kms-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  595. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  596. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/log4j.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  597.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  598.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  599.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  600.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  601.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  602.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  603.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  604.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  605.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  606.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  607.     at java.security.AccessController.doPrivileged(Native Method)
  608.     at javax.security.auth.Subject.doAs(Subject.java:422)
  609.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  610.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  611.  
  612.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  613.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  614.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  615.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  616.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  617.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  618.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  619.     at java.lang.reflect.Method.invoke(Method.java:497)
  620.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  621.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  622.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  623.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  624.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  625.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  626. put: File /input/hadoop/log4j.properties._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  627. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  628. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/mapred-env.cmd._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  629.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  630.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  631.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  632.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  633.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  634.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  635.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  636.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  637.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  638.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  639.     at java.security.AccessController.doPrivileged(Native Method)
  640.     at javax.security.auth.Subject.doAs(Subject.java:422)
  641.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  642.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  643.  
  644.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  645.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  646.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  647.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  648.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  649.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  650.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  651.     at java.lang.reflect.Method.invoke(Method.java:497)
  652.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  653.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  654.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  655.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  656.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  657.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  658. put: File /input/hadoop/mapred-env.cmd._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  659. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  660. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/mapred-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  661.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  662.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  663.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  664.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  665.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  666.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  667.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  668.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  669.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  670.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  671.     at java.security.AccessController.doPrivileged(Native Method)
  672.     at javax.security.auth.Subject.doAs(Subject.java:422)
  673.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  674.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  675.  
  676.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  677.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  678.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  679.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  680.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  681.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  682.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  683.     at java.lang.reflect.Method.invoke(Method.java:497)
  684.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  685.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  686.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  687.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  688.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  689.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  690. put: File /input/hadoop/mapred-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  691. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  692. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/mapred-queues.xml.template._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  693.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  694.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  695.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  696.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  697.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  698.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  699.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  700.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  701.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  702.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  703.     at java.security.AccessController.doPrivileged(Native Method)
  704.     at javax.security.auth.Subject.doAs(Subject.java:422)
  705.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  706.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  707.  
  708.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  709.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  710.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  711.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  712.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  713.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  714.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  715.     at java.lang.reflect.Method.invoke(Method.java:497)
  716.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  717.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  718.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  719.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  720.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  721.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  722. put: File /input/hadoop/mapred-queues.xml.template._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  723. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  724. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/mapred-site.xml.template._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  725.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  726.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  727.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  728.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  729.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  730.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  731.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  732.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  733.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  734.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  735.     at java.security.AccessController.doPrivileged(Native Method)
  736.     at javax.security.auth.Subject.doAs(Subject.java:422)
  737.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  738.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  739.  
  740.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  741.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  742.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  743.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  744.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  745.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  746.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  747.     at java.lang.reflect.Method.invoke(Method.java:497)
  748.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  749.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  750.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  751.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  752.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  753.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  754. put: File /input/hadoop/mapred-site.xml.template._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  755. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  756. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/slaves._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  757.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  758.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  759.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  760.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  761.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  762.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  763.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  764.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  765.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  766.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  767.     at java.security.AccessController.doPrivileged(Native Method)
  768.     at javax.security.auth.Subject.doAs(Subject.java:422)
  769.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  770.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  771.  
  772.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  773.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  774.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  775.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  776.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  777.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  778.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  779.     at java.lang.reflect.Method.invoke(Method.java:497)
  780.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  781.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  782.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  783.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  784.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  785.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  786. put: File /input/hadoop/slaves._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  787. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  788. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/ssl-client.xml.example._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  789.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  790.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  791.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  792.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  793.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  794.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  795.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  796.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  797.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  798.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  799.     at java.security.AccessController.doPrivileged(Native Method)
  800.     at javax.security.auth.Subject.doAs(Subject.java:422)
  801.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  802.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  803.  
  804.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  805.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  806.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  807.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  808.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  809.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  810.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  811.     at java.lang.reflect.Method.invoke(Method.java:497)
  812.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  813.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  814.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  815.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  816.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  817.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  818. put: File /input/hadoop/ssl-client.xml.example._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  819. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  820. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/ssl-server.xml.example._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  821.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  822.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  823.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  824.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  825.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  826.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  827.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  828.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  829.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  830.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  831.     at java.security.AccessController.doPrivileged(Native Method)
  832.     at javax.security.auth.Subject.doAs(Subject.java:422)
  833.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  834.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  835.  
  836.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  837.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  838.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  839.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  840.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  841.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  842.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  843.     at java.lang.reflect.Method.invoke(Method.java:497)
  844.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  845.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  846.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  847.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  848.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  849.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  850. put: File /input/hadoop/ssl-server.xml.example._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  851. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  852. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/yarn-env.cmd._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  853.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  854.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  855.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  856.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  857.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  858.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  859.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  860.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  861.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  862.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  863.     at java.security.AccessController.doPrivileged(Native Method)
  864.     at javax.security.auth.Subject.doAs(Subject.java:422)
  865.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  866.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  867.  
  868.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  869.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  870.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  871.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  872.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  873.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  874.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  875.     at java.lang.reflect.Method.invoke(Method.java:497)
  876.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  877.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  878.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  879.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  880.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  881.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  882. put: File /input/hadoop/yarn-env.cmd._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  883. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  884. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/yarn-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  885.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  886.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  887.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  888.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  889.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  890.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  891.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  892.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  893.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  894.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  895.     at java.security.AccessController.doPrivileged(Native Method)
  896.     at javax.security.auth.Subject.doAs(Subject.java:422)
  897.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  898.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  899.  
  900.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  901.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  902.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  903.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  904.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  905.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  906.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  907.     at java.lang.reflect.Method.invoke(Method.java:497)
  908.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  909.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  910.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  911.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  912.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  913.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  914. put: File /input/hadoop/yarn-env.sh._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  915. 16/05/15 17:09:12 WARN hdfs.DFSClient: DataStreamer Exception
  916. org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /input/hadoop/yarn-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
  917.     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1547)
  918.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3107)
  919.     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3031)
  920.     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:724)
  921.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
  922.     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
  923.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
  924.     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
  925.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
  926.     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
  927.     at java.security.AccessController.doPrivileged(Native Method)
  928.     at javax.security.auth.Subject.doAs(Subject.java:422)
  929.     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
  930.     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
  931.  
  932.     at org.apache.hadoop.ipc.Client.call(Client.java:1475)
  933.     at org.apache.hadoop.ipc.Client.call(Client.java:1412)
  934.     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
  935.     at com.sun.proxy.$Proxy9.addBlock(Unknown Source)
  936.     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:418)
  937.     at sun.reflect.GeneratedMethodAccessor4.invoke(Unknown Source)
  938.     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  939.     at java.lang.reflect.Method.invoke(Method.java:497)
  940.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
  941.     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
  942.     at com.sun.proxy.$Proxy10.addBlock(Unknown Source)
  943.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1459)
  944.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1255)
  945.     at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:449)
  946. put: File /input/hadoop/yarn-site.xml._COPYING_ could only be replicated to 0 nodes instead of minReplication (=1).  There are 0 datanode(s) running and no node(s) are excluded in this operation.
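Every failure above has the same root cause: the NameNode reports "There are 0 datanode(s) running", so HDFS has nowhere to place even a single replica (minReplication=1) and each copy is rejected. The following is only a minimal recovery sketch for a single-node, pseudo-distributed Hadoop 2.7.2 setup; the paths assume the default hadoop.tmp.dir of /tmp/hadoop-$USER, the default log file names, and a throwaway dev cluster whose HDFS contents may be wiped. None of that is confirmed by the log itself.

  jps                                        # check whether a DataNode process is running at all
  less logs/hadoop-$USER-datanode-*.log      # if not, see why it exited (a clusterID mismatch after re-formatting the NameNode is a common cause)
  sbin/stop-dfs.sh                           # stop the HDFS daemons
  rm -rf /tmp/hadoop-$USER/dfs/data          # CAUTION: deletes local DataNode storage; only acceptable on a disposable dev cluster
  bin/hdfs namenode -format                  # re-format so the NameNode and DataNode storage agree on clusterID
  sbin/start-dfs.sh                          # restart HDFS
  bin/hdfs dfsadmin -report                  # wait until "Live datanodes" shows at least 1, then retry the original put

The clusterID-mismatch diagnosis and the directory locations are assumptions for illustration; the only fact the output establishes is that no DataNode was registered with the NameNode when the command ran.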