@@ -25,7 +25,6 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.ipc.*;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus;
@@ -780,8 +779,11 @@ public class DFSClient implements FSConstants, java.io.Closeable {
       ClientProtocol namenode, SocketFactory socketFactory, int socketTimeout
       ) throws IOException {
     //get all block locations
-    List<LocatedBlock> locatedblocks
-        = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE).getLocatedBlocks();
+    LocatedBlocks blockLocations = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE);
+    if (null == blockLocations) {
+      throw new FileNotFoundException("File does not exist: " + src);
+    }
+    List<LocatedBlock> locatedblocks = blockLocations.getLocatedBlocks();
     final DataOutputBuffer md5out = new DataOutputBuffer();
     int bytesPerCRC = 0;
     long crcPerBlock = 0;
@@ -791,8 +793,11 @@ public class DFSClient implements FSConstants, java.io.Closeable {
     //get block checksum for each block
     for(int i = 0; i < locatedblocks.size(); i++) {
       if (refetchBlocks) { // refetch to get fresh tokens
-        locatedblocks = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE)
-            .getLocatedBlocks();
+        blockLocations = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE);
+        if (null == blockLocations) {
+          throw new FileNotFoundException("File does not exist: " + src);
+        }
+        locatedblocks = blockLocations.getLocatedBlocks();
         refetchBlocks = false;
       }
       LocatedBlock lb = locatedblocks.get(i);