@@ -18,7 +18,26 @@
 package org.apache.hadoop.hdfs.server.common;
 
-import com.google.common.base.Charsets;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.jsp.JspWriter;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -33,7 +52,11 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.RemotePeerFactory;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.net.TcpPeerServer;
-import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -56,22 +79,7 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.VersionInfo;
 
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.jsp.JspWriter;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
-import java.net.URL;
-import java.net.URLEncoder;
-import java.util.*;
-
-import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+import com.google.common.base.Charsets;
 
 @InterfaceAudience.Private
 public class JspHelper {
@@ -171,58 +179,31 @@ public class JspHelper {
     }
     NodeRecord[] nodes = map.values().toArray(new NodeRecord[map.size()]);
     Arrays.sort(nodes, new NodeRecordComparator());
-    return bestNode(nodes, false, conf);
+    return bestNode(nodes, false);
   }
 
   public static DatanodeInfo bestNode(LocatedBlock blk, Configuration conf)
       throws IOException {
     DatanodeInfo[] nodes = blk.getLocations();
-    return bestNode(nodes, true, conf);
+    return bestNode(nodes, true);
   }
 
-  public static DatanodeInfo bestNode(DatanodeInfo[] nodes, boolean doRandom,
-      Configuration conf) throws IOException {
-    TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
-    DatanodeInfo chosenNode = null;
-    int failures = 0;
-    Socket s = null;
-    int index = -1;
+  private static DatanodeInfo bestNode(DatanodeInfo[] nodes, boolean doRandom)
+      throws IOException {
     if (nodes == null || nodes.length == 0) {
       throw new IOException("No nodes contain this block");
     }
-    while (s == null) {
-      if (chosenNode == null) {
-        do {
-          if (doRandom) {
-            index = DFSUtil.getRandom().nextInt(nodes.length);
-          } else {
-            index++;
-          }
-          chosenNode = nodes[index];
-        } while (deadNodes.contains(chosenNode));
-      }
-      chosenNode = nodes[index];
+    int l = 0;
+    while (l < nodes.length && !nodes[l].isDecommissioned()) {
+      ++l;
+    }
 
-      //just ping to check whether the node is alive
-      InetSocketAddress targetAddr = NetUtils.createSocketAddr(
-          chosenNode.getInfoAddr());
-
-      try {
-        s = NetUtils.getDefaultSocketFactory(conf).createSocket();
-        s.connect(targetAddr, HdfsServerConstants.READ_TIMEOUT);
-        s.setSoTimeout(HdfsServerConstants.READ_TIMEOUT);
-      } catch (IOException e) {
-        deadNodes.add(chosenNode);
-        IOUtils.closeSocket(s);
-        s = null;
-        failures++;
-      }
-      if (failures == nodes.length)
-        throw new IOException("Could not reach the block containing the data. Please try again");
-
+    if (l == 0) {
+      throw new IOException("No active nodes contain this block");
     }
-    s.close();
-    return chosenNode;
+
+    int index = doRandom ? DFSUtil.getRandom().nextInt(l) : 0;
+    return nodes[index];
   }
 
   public static void streamBlockInAscii(InetSocketAddress addr, String poolId,
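
Note on the bestNode() rewrite (not part of the patch): the old implementation probed each candidate with a socket connect bounded by HdfsServerConstants.READ_TIMEOUT, so a JSP page could block for a long time while dead replicas timed out one by one. The new version never touches the network; it scans the leading run of non-decommissioned nodes and picks one, apparently relying on the callers' ordering of candidates (Arrays.sort with NodeRecordComparator, or blk.getLocations()) to place decommissioned replicas at the tail. Below is a minimal, self-contained sketch of that selection logic for experimenting outside the patch. BestNodeSketch, bestNodeIndex, and the boolean[] stand-in for DatanodeInfo.isDecommissioned() are illustrative names only, and java.util.Random replaces DFSUtil.getRandom().

import java.io.IOException;
import java.util.Random;

public class BestNodeSketch {
  private static final Random RANDOM = new Random(); // stand-in for DFSUtil.getRandom()

  // decommissioned[i] stands in for nodes[i].isDecommissioned(); as in the
  // patch, the array is assumed sorted so decommissioned replicas come last.
  static int bestNodeIndex(boolean[] decommissioned, boolean doRandom)
      throws IOException {
    if (decommissioned == null || decommissioned.length == 0) {
      throw new IOException("No nodes contain this block");
    }
    // Length of the leading run of live (non-decommissioned) nodes.
    int live = 0;
    while (live < decommissioned.length && !decommissioned[live]) {
      ++live;
    }
    if (live == 0) {
      throw new IOException("No active nodes contain this block");
    }
    // Random choice spreads read load across live replicas; the deterministic
    // path takes the first (best-sorted) candidate.
    return doRandom ? RANDOM.nextInt(live) : 0;
  }

  public static void main(String[] args) throws IOException {
    boolean[] nodes = {false, false, false, true}; // three live, one decommissioned
    System.out.println("chosen index: " + bestNodeIndex(nodes, true)); // prints 0, 1, or 2
  }
}

One behavioral consequence worth keeping in mind: a node that is merely unreachable (but not decommissioned) can now be returned, since liveness is no longer probed; such failures surface only when the caller attempts its own connection.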