@@ -17,6 +17,11 @@
  */
 package org.apache.ambari.server.upgrade;
 
+import javax.persistence.EntityManager;
+import javax.xml.bind.JAXBException;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
 import java.io.File;
 import java.io.FileReader;
 import java.io.FilenameFilter;
@@ -42,11 +47,6 @@ import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import javax.persistence.EntityManager;
-import javax.xml.bind.JAXBException;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -447,11 +447,16 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
     Config hdfsSiteConfig = cluster.getDesiredConfigByType(CONFIGURATION_TYPE_HDFS_SITE);
     if (hdfsSiteConfig != null) {
       Map<String, String> properties = hdfsSiteConfig.getProperties();
+      if (properties.containsKey("dfs.internal.nameservices")) {
+        return true;
+      }
       String nameServices = properties.get(PROPERTY_DFS_NAMESERVICES);
       if (!StringUtils.isEmpty(nameServices)) {
-        String namenodes = properties.get(String.format("dfs.ha.namenodes.%s", nameServices));
-        if (!StringUtils.isEmpty(namenodes)) {
-          return (namenodes.split(",").length > 1);
+        for (String nameService : nameServices.split(",")) {
+          String namenodes = properties.get(String.format("dfs.ha.namenodes.%s", nameService));
+          if (!StringUtils.isEmpty(namenodes)) {
+            return (namenodes.split(",").length > 1);
+          }
         }
       }
     }
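
For reference, the last hunk changes how NameNode HA is detected: the check now returns true as soon as dfs.internal.nameservices is set, and it iterates over every nameservice listed in dfs.nameservices (a federated cluster can list several, comma-separated) instead of looking up dfs.ha.namenodes.&lt;whole dfs.nameservices value&gt; verbatim. Below is a minimal, self-contained sketch of that logic written against a plain hdfs-site property map; the class name, helper method, and sample property values are illustrative only and are not part of the patch.

import java.util.HashMap;
import java.util.Map;

/**
 * Illustrative sketch of the NameNode HA check as patched above,
 * operating on a plain hdfs-site property map instead of the
 * Ambari Cluster/Config objects.
 */
public class NameNodeHaCheckSketch {

  static boolean isNameNodeHaEnabled(Map<String, String> hdfsSite) {
    // Mirrors the patch: the presence of dfs.internal.nameservices is
    // treated as HA being enabled.
    if (hdfsSite.containsKey("dfs.internal.nameservices")) {
      return true;
    }
    String nameServices = hdfsSite.get("dfs.nameservices");
    if (nameServices != null && !nameServices.isEmpty()) {
      // Federation: dfs.nameservices may list several nameservices, so each
      // one is checked for its own dfs.ha.namenodes.<nameservice> entry.
      for (String nameService : nameServices.split(",")) {
        String namenodes = hdfsSite.get(String.format("dfs.ha.namenodes.%s", nameService));
        if (namenodes != null && !namenodes.isEmpty()) {
          // As in the patch, the first nameservice that defines namenodes
          // decides the result: HA means more than one NameNode.
          return namenodes.split(",").length > 1;
        }
      }
    }
    return false;
  }

  public static void main(String[] args) {
    // Hypothetical federated cluster: two nameservices, the first one HA.
    Map<String, String> hdfsSite = new HashMap<>();
    hdfsSite.put("dfs.nameservices", "ns1,ns2");
    hdfsSite.put("dfs.ha.namenodes.ns1", "nn1,nn2");
    System.out.println(isNameNodeHaEnabled(hdfsSite)); // prints: true
  }
}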