|
@@ -19,58 +19,85 @@
|
|
package org.apache.hadoop.hdfs.util;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
|
|
/**
|
|
/**
|
|
- * Reader support for JSON based datanode configuration, an alternative
|
|
|
|
|
|
+ * Reader support for JSON-based datanode configuration, an alternative format
|
|
* to the exclude/include files configuration.
|
|
* to the exclude/include files configuration.
|
|
- * The JSON file format is the array of elements where each element
|
|
|
|
|
|
+ * The JSON file format defines the array of elements where each element
|
|
* in the array describes the properties of a datanode. The properties of
|
|
* in the array describes the properties of a datanode. The properties of
|
|
- * a datanode is defined in {@link DatanodeAdminProperties}. For example,
|
|
|
|
|
|
+ * a datanode is defined by {@link DatanodeAdminProperties}. For example,
|
|
*
|
|
*
|
|
- * {"hostName": "host1"}
|
|
|
|
- * {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"}
|
|
|
|
- * {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
|
|
|
|
|
|
+ * [
|
|
|
|
+ * {"hostName": "host1"},
|
|
|
|
+ * {"hostName": "host2", "port": 50, "upgradeDomain": "ud0"},
|
|
|
|
+ * {"hostName": "host3", "port": 0, "adminState": "DECOMMISSIONED"}
|
|
|
|
+ * ]
|
|
*/
|
|
*/
|
|
@InterfaceAudience.LimitedPrivate({"HDFS"})
|
|
@InterfaceAudience.LimitedPrivate({"HDFS"})
|
|
@InterfaceStability.Unstable
|
|
@InterfaceStability.Unstable
|
|
public final class CombinedHostsFileReader {
|
|
public final class CombinedHostsFileReader {
|
|
- private static final ObjectReader READER =
|
|
|
|
- new ObjectMapper().readerFor(DatanodeAdminProperties.class);
|
|
|
|
- private static final JsonFactory JSON_FACTORY = new JsonFactory();
|
|
|
|
|
|
+
|
|
|
|
+ public static final Logger LOG =
|
|
|
|
+ LoggerFactory.getLogger(CombinedHostsFileReader.class);
|
|
|
|
|
|
private CombinedHostsFileReader() {
|
|
private CombinedHostsFileReader() {
|
|
}
|
|
}
|
|
|
|
|
|
/**
|
|
/**
|
|
* Deserialize a set of DatanodeAdminProperties from a json file.
|
|
* Deserialize a set of DatanodeAdminProperties from a json file.
|
|
- * @param hostsFile the input json file to read from.
|
|
|
|
|
|
+ * @param hostsFile the input json file to read from
|
|
* @return the set of DatanodeAdminProperties
|
|
* @return the set of DatanodeAdminProperties
|
|
* @throws IOException
|
|
* @throws IOException
|
|
*/
|
|
*/
|
|
- public static Set<DatanodeAdminProperties>
|
|
|
|
|
|
+ public static DatanodeAdminProperties[]
|
|
readFile(final String hostsFile) throws IOException {
|
|
readFile(final String hostsFile) throws IOException {
|
|
- HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
|
|
|
|
|
|
+ DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
|
|
|
|
+ ObjectMapper objectMapper = new ObjectMapper();
|
|
|
|
+ boolean tryOldFormat = false;
|
|
try (Reader input =
|
|
try (Reader input =
|
|
- new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
|
|
|
|
- Iterator<DatanodeAdminProperties> iterator =
|
|
|
|
- READER.readValues(JSON_FACTORY.createParser(input));
|
|
|
|
- while (iterator.hasNext()) {
|
|
|
|
- DatanodeAdminProperties properties = iterator.next();
|
|
|
|
- allDNs.add(properties);
|
|
|
|
|
|
+ new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
|
|
|
|
+ allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
|
|
|
|
+ } catch (JsonMappingException jme) {
|
|
|
|
+ // The old format doesn't have json top-level token to enclose the array.
|
|
|
|
+ // For backward compatibility, try parsing the old format.
|
|
|
|
+ tryOldFormat = true;
|
|
|
|
+ LOG.warn("{} has invalid JSON format." +
|
|
|
|
+ "Try the old format without top-level token defined.", hostsFile);
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ if (tryOldFormat) {
|
|
|
|
+ ObjectReader objectReader =
|
|
|
|
+ objectMapper.readerFor(DatanodeAdminProperties.class);
|
|
|
|
+ JsonFactory jsonFactory = new JsonFactory();
|
|
|
|
+ List<DatanodeAdminProperties> all = new ArrayList<>();
|
|
|
|
+ try (Reader input =
|
|
|
|
+ new InputStreamReader(new FileInputStream(hostsFile), "UTF-8")) {
|
|
|
|
+ Iterator<DatanodeAdminProperties> iterator =
|
|
|
|
+ objectReader.readValues(jsonFactory.createParser(input));
|
|
|
|
+ while (iterator.hasNext()) {
|
|
|
|
+ DatanodeAdminProperties properties = iterator.next();
|
|
|
|
+ all.add(properties);
|
|
|
|
+ }
|
|
}
|
|
}
|
|
|
|
+ allDNs = all.toArray(new DatanodeAdminProperties[all.size()]);
|
|
}
|
|
}
|
|
return allDNs;
|
|
return allDNs;
|
|
}
|
|
}
|