@@ -28,14 +28,19 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.TreeSet;
 
+import com.google.common.base.Optional;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.ReconfigurationTaskStatus;
+import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
@@ -374,6 +379,7 @@ public class DFSAdmin extends FsShell {
     "\t[-refreshSuperUserGroupsConfiguration]\n" +
     "\t[-refreshCallQueue]\n" +
     "\t[-refresh <host:ipc_port> <key> [arg1..argn]\n" +
+    "\t[-reconfig <datanode|...> <host:ipc_port> <start|status>]\n" +
     "\t[-printTopology]\n" +
     "\t[-refreshNamenodes datanode_host:ipc_port]\n"+
     "\t[-deleteBlockPool datanode_host:ipc_port blockpoolId [force]]\n"+
@@ -880,9 +886,14 @@ public class DFSAdmin extends FsShell {
 
     String refreshCallQueue = "-refreshCallQueue: Reload the call queue from config\n";
 
+    String reconfig = "-reconfig <datanode|...> <host:ipc_port> <start|status>:\n" +
+        "\tStarts reconfiguration or gets the status of an ongoing reconfiguration.\n" +
+        "\tThe second parameter specifies the node type.\n" +
+        "\tCurrently, only reloading DataNode's configuration is supported.\n";
+
     String genericRefresh = "-refresh: Arguments are <hostname:port> <resource_identifier> [arg1..argn]\n" +
       "\tTriggers a runtime-refresh of the resource specified by <resource_identifier>\n" +
-      "\ton <hostname:port>. All other args after are sent to the host.";
+      "\ton <hostname:port>. All other args after are sent to the host.\n";
 
     String printTopology = "-printTopology: Print a tree of the racks and their\n" +
       "\t\tnodes as reported by the Namenode\n";
@@ -970,6 +981,8 @@ public class DFSAdmin extends FsShell {
       System.out.println(refreshCallQueue);
     } else if ("refresh".equals(cmd)) {
       System.out.println(genericRefresh);
+    } else if ("reconfig".equals(cmd)) {
+      System.out.println(reconfig);
     } else if ("printTopology".equals(cmd)) {
       System.out.println(printTopology);
     } else if ("refreshNamenodes".equals(cmd)) {
@@ -1010,6 +1023,7 @@ public class DFSAdmin extends FsShell {
     System.out.println(refreshSuperUserGroupsConfiguration);
     System.out.println(refreshCallQueue);
     System.out.println(genericRefresh);
+    System.out.println(reconfig);
     System.out.println(printTopology);
     System.out.println(refreshNamenodes);
     System.out.println(deleteBlockPool);
@@ -1317,6 +1331,75 @@ public class DFSAdmin extends FsShell {
     return 0;
   }
 
+  public int reconfig(String[] argv, int i) throws IOException {
+    String nodeType = argv[i];
+    String address = argv[i + 1];
+    String op = argv[i + 2];
+    if ("start".equals(op)) {
+      return startReconfiguration(nodeType, address);
+    } else if ("status".equals(op)) {
+      return getReconfigurationStatus(nodeType, address, System.out, System.err);
+    }
+    System.err.println("Unknown operation: " + op);
+    return -1;
+  }
+
+  int startReconfiguration(String nodeType, String address) throws IOException {
+    if ("datanode".equals(nodeType)) {
+      ClientDatanodeProtocol dnProxy = getDataNodeProxy(address);
+      dnProxy.startReconfiguration();
+      System.out.println("Started reconfiguration task on DataNode " + address);
+      return 0;
+    }
+    System.err.println("Node type " + nodeType +
+        " does not support reconfiguration.");
+    return -1;
+  }
+
+  int getReconfigurationStatus(String nodeType, String address,
+      PrintStream out, PrintStream err) throws IOException {
+    if ("datanode".equals(nodeType)) {
+      ClientDatanodeProtocol dnProxy = getDataNodeProxy(address);
+      try {
+        ReconfigurationTaskStatus status = dnProxy.getReconfigurationStatus();
+        out.print("Reconfiguring status for DataNode[" + address + "]: ");
+        if (!status.hasTask()) {
+          out.println("no task was found.");
+          return 0;
+        }
+        out.print("started at " + new Date(status.getStartTime()));
+        if (!status.stopped()) {
+          out.println(" and is still running.");
+          return 0;
+        }
+
+        out.println(" and finished at " +
+            new Date(status.getEndTime()).toString() + ".");
+        for (Map.Entry<PropertyChange, Optional<String>> result :
+            status.getStatus().entrySet()) {
+          if (!result.getValue().isPresent()) {
+            out.print("SUCCESS: ");
+          } else {
+            out.print("FAILED: ");
+          }
+          out.printf("Change property %s\n\tFrom: \"%s\"\n\tTo: \"%s\"\n",
+              result.getKey().prop, result.getKey().oldVal,
+              result.getKey().newVal);
+          if (result.getValue().isPresent()) {
+            out.println("\tError: " + result.getValue().get() + ".");
+          }
+        }
+      } catch (IOException e) {
+        err.println("DataNode reloading configuration: " + e + ".");
+        return -1;
+      }
+    } else {
+      err.println("Node type " + nodeType + " does not support reconfiguration.");
+      return -1;
+    }
+    return 0;
+  }
+
   public int genericRefresh(String[] argv, int i) throws IOException {
     String hostport = argv[i++];
     String identifier = argv[i++];
@@ -1429,6 +1512,9 @@ public class DFSAdmin extends FsShell {
     } else if ("-refreshCallQueue".equals(cmd)) {
       System.err.println("Usage: hdfs dfsadmin"
                          + " [-refreshCallQueue]");
+    } else if ("-reconfig".equals(cmd)) {
+      System.err.println("Usage: hdfs dfsadmin"
+                         + " [-reconfig <datanode|...> <host:port> <start|status>]");
     } else if ("-refresh".equals(cmd)) {
       System.err.println("Usage: hdfs dfsadmin"
                          + " [-refresh <hostname:port> <resource_identifier> [arg1..argn]");
@@ -1561,6 +1647,11 @@ public class DFSAdmin extends FsShell {
         printUsage(cmd);
         return exitCode;
       }
+    } else if ("-reconfig".equals(cmd)) {
+      if (argv.length != 4) {
+        printUsage(cmd);
+        return exitCode;
+      }
     } else if ("-deleteBlockPool".equals(cmd)) {
       if ((argv.length != 3) && (argv.length != 4)) {
         printUsage(cmd);
@@ -1657,6 +1748,8 @@ public class DFSAdmin extends FsShell {
       exitCode = shutdownDatanode(argv, i);
     } else if ("-getDatanodeInfo".equals(cmd)) {
       exitCode = getDatanodeInfo(argv, i);
+    } else if ("-reconfig".equals(cmd)) {
+      exitCode = reconfig(argv, i);
     } else if ("-help".equals(cmd)) {
      if (i < argv.length) {
        printHelp(argv[i]);