[root@localhost hadoop-0.20]# hadoop fs -put ~/Desktop/hadoop-book-master/test-data/ch1/file*.txt /
16/01/29 18:56:46 WARN hdfs.DFSClient: DataStreamer Exception: org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /file1.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:771)
16/01/29 18:56:46 WARN hdfs.DFSClient: Error Recovery for block null bad datanode[0] nodes == null
16/01/29 18:56:46 WARN hdfs.DFSClient: Could not get block locations. Source file "/file1.txt" - Aborting...
16/01/29 18:56:46 WARN hdfs.DFSClient: DataStreamer Exception: org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /file2.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:771)
16/01/29 18:56:46 WARN hdfs.DFSClient: Error Recovery for block null bad datanode[0] nodes == null
16/01/29 18:56:46 WARN hdfs.DFSClient: Could not get block locations. Source file "/file2.txt" - Aborting...
put: java.io.IOException: File /file1.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:771)
java.io.IOException: File /file2.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
16/01/29 18:56:46 ERROR hdfs.DFSClient: Exception closing file /file1.txt : org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /file1.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /file1.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
16/01/29 18:56:46 ERROR hdfs.DFSClient: Exception closing file /file2.txt : org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /file2.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:771)
org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /file2.txt could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1576)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:771)
[root@localhost hadoop-0.20]# hadoop fs -ls /
Found 3 items
-rw-r--r-- 1 root supergroup 0 2016-01-29 18:56 /file1.txt
-rw-r--r-- 1 root supergroup 0 2016-01-29 18:56 /file2.txt
drwxr-xr-x - mapred supergroup 0 2016-01-29 18:30 /var
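The zero-byte /file1.txt and /file2.txt entries together with the "could only be replicated to 0 nodes, instead of 1" error usually mean the NameNode has no live DataNodes to write the blocks to (the DataNode process is not running, failed to register, or has no usable disk space). A minimal way to check, assuming a standard single-node Hadoop 0.20 setup like the one in the transcript, is:

# List the running Hadoop daemons; a DataNode process should appear alongside the NameNode
jps

# Ask the NameNode how many DataNodes are live and how much capacity it reports
hadoop dfsadmin -report

If the report shows 0 live DataNodes, the DataNode log (under the Hadoop logs directory of your install; the exact path depends on how Hadoop was installed) typically explains why. A common cause on 0.20 is a namespaceID mismatch after the NameNode has been reformatted, in which case clearing or re-initializing the DataNode's data directory (dfs.data.dir) and restarting the DataNode lets it register again. These are general troubleshooting steps for this error, not steps confirmed against this particular cluster.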