
AMBARI-16162. Reduce NN start time by removing redundant haadmin calls. (aonishuk)

Andrew Onishuk, 9 years ago
parent
commit 35ebd80d87
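In short: before this patch, every HdfsResource call issued during NameNode start carried an only_if shell guard that ran an "hdfs haadmin -getServiceState ... | grep active" check, so the same HA state query was executed once per directory resource. The patch runs the check once in Python via check_is_active_namenode(), remembers the result in a local is_active_namenode flag, and skips directory creation entirely on a standby NameNode. A condensed sketch of the before/after pattern, paraphrasing the hunks below (not a verbatim excerpt; helpers and params come from the scripts being patched):

# Before: each resource re-ran the HA check as a shell guard.
params.HdfsResource(params.hdfs_tmp_dir,
                    type="directory",
                    action="create_on_execute",
                    only_if=is_active_namenode_cmd)  # spawns "hdfs haadmin ... -getServiceState ... | grep active" per resource

# After: one Python-level check, then plain branching.
if check_is_active_namenode(hdfs_binary):  # single haadmin call, waits up to 30 seconds
  create_hdfs_directories()                # HdfsResource calls no longer carry only_if
  create_ranger_audit_hdfs_directories()
else:
  Logger.info("Skipping creating hdfs directories as is not active NN.")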

+ 11 - 8
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py

@@ -180,6 +180,7 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None,
     # EU non-HA             | no change                | no check                 |

     check_for_safemode_off = False
+    is_active_namenode = False
     msg = ""
     msg = ""
     if params.dfs_ha_enabled:
     if params.dfs_ha_enabled:
       if upgrade_type is not None:
       if upgrade_type is not None:
@@ -187,14 +188,16 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None,
         msg = "Must wait to leave safemode since High Availability is enabled during a Stack Upgrade"
         msg = "Must wait to leave safemode since High Availability is enabled during a Stack Upgrade"
       else:
       else:
         Logger.info("Wait for NameNode to become active.")
         Logger.info("Wait for NameNode to become active.")
-        if is_active_namenode(hdfs_binary): # active
+        if check_is_active_namenode(hdfs_binary): # active
           check_for_safemode_off = True
+          is_active_namenode = True
           msg = "Must wait to leave safemode since High Availability is enabled and this is the Active NameNode."
           msg = "Must wait to leave safemode since High Availability is enabled and this is the Active NameNode."
         else:
         else:
           msg = "Will remain in the current safemode state."
           msg = "Will remain in the current safemode state."
     else:
     else:
       msg = "Must wait to leave safemode since High Availability is not enabled."
       msg = "Must wait to leave safemode since High Availability is not enabled."
       check_for_safemode_off = True
       check_for_safemode_off = True
+      is_active_namenode = True
 
 
     Logger.info(msg)
     Logger.info(msg)
 
 
@@ -209,8 +212,11 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None,
         wait_for_safemode_off(hdfs_binary)

     # Always run this on non-HA, or active NameNode during HA.
-    create_hdfs_directories(is_active_namenode_cmd)
-    create_ranger_audit_hdfs_directories(is_active_namenode_cmd)
+    if is_active_namenode:
+      create_hdfs_directories()
+      create_ranger_audit_hdfs_directories()
+    else:
+      Logger.info("Skipping creating hdfs directories as is not active NN.")

   elif action == "stop":
     import params
@@ -267,7 +273,7 @@ def create_name_dirs(directories):
   )

-def create_hdfs_directories(check):
+def create_hdfs_directories():
   import params

   params.HdfsResource(params.hdfs_tmp_dir,
@@ -275,18 +281,15 @@ def create_hdfs_directories(check):
                        action="create_on_execute",
                        action="create_on_execute",
                        owner=params.hdfs_user,
                        owner=params.hdfs_user,
                        mode=0777,
                        mode=0777,
-                       only_if=check
   )
   params.HdfsResource(params.smoke_hdfs_user_dir,
                        type="directory",
                        action="create_on_execute",
                        owner=params.smoke_user,
                        mode=params.smoke_hdfs_user_mode,
-                       only_if=check
   )
   params.HdfsResource(None,
                       action="execute",
-                      only_if=check #skip creation when HA not active
   )

 def format_namenode(force=None):
@@ -508,7 +511,7 @@ def is_namenode_bootstrapped(params):
   return marked

-def is_active_namenode(hdfs_binary):
+def check_is_active_namenode(hdfs_binary):
   """
   """
   Checks if current NameNode is active. Waits up to 30 seconds. If other NameNode is active returns False.
   Checks if current NameNode is active. Waits up to 30 seconds. If other NameNode is active returns False.
   :return: True if current NameNode is active, False otherwise
   :return: True if current NameNode is active, False otherwise
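For reference, the exact shell command the old only_if guards spawned appears verbatim in the removed test expectations further down ("hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active"). A minimal, hypothetical sketch of the kind of poll loop the docstring above describes (an illustration, not the project's actual implementation) might look like this:

import subprocess
import time

def check_active_sketch(hdfs_binary, nameservice="ns1", namenode_id="nn1", timeout=30):
  # Illustrative only: poll "haadmin -getServiceState" until this NameNode reports
  # "active" or the timeout expires. Per the docstring, the real helper also returns
  # False early if the peer NameNode is already active; kerberos/sudo handling and
  # the --config flag seen in the removed only_if strings are omitted here.
  deadline = time.time() + timeout
  while time.time() < deadline:
    result = subprocess.run(
        [hdfs_binary, "haadmin", "-ns", nameservice, "-getServiceState", namenode_id],
        capture_output=True, text=True)
    if "active" in result.stdout:
      return True
    time.sleep(5)
  return False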

+ 2 - 4
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/setup_ranger_hdfs.py

@@ -95,7 +95,7 @@ def setup_ranger_hdfs(upgrade_type=None):
   else:
     Logger.info('Ranger admin not installed')

-def create_ranger_audit_hdfs_directories(check):
+def create_ranger_audit_hdfs_directories():
   import params

   if params.has_ranger_admin:
@@ -107,7 +107,6 @@ def create_ranger_audit_hdfs_directories(check):
                          group=params.hdfs_user,
                          mode=0755,
                          recursive_chmod=True,
-                         only_if=check
       )
       params.HdfsResource("/ranger/audit/hdfs",
                          type="directory",
@@ -116,8 +115,7 @@ def create_ranger_audit_hdfs_directories(check):
                          group=params.hdfs_user,
                          mode=0700,
                          recursive_chmod=True,
-                         only_if=check
       )
-      params.HdfsResource(None, action="execute", only_if=check)
+      params.HdfsResource(None, action="execute")
   else:
     Logger.info('Ranger admin not installed')

+ 3 - 33
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -100,7 +100,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if=True,
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'wasb://abc@c6401.ambari.apache.org',
@@ -118,7 +117,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if=True,
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'wasb://abc@c6401.ambari.apache.org',
@@ -136,7 +134,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if=True,
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'wasb://abc@c6401.ambari.apache.org',
@@ -218,7 +215,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = True,
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -236,7 +232,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = True,
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -254,7 +249,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = True,
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -361,8 +355,7 @@ class TestNamenode(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
-        mode = 0777,
-        only_if = True
+        mode = 0777
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
@@ -376,13 +369,11 @@ class TestNamenode(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
-        mode = 0770,
-        only_if = True
+        mode = 0770
     )
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        only_if = True,
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         hadoop_bin_dir = '/usr/bin',
         kinit_path_local = '/usr/bin/kinit',
@@ -453,7 +444,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -471,7 +461,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -489,7 +478,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -555,7 +543,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -573,7 +560,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -591,7 +577,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -663,7 +648,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -681,7 +665,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -699,7 +682,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -771,7 +753,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -789,7 +770,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -807,7 +787,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn1 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -881,7 +860,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -899,7 +877,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -917,7 +894,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
         keytab = UnknownConfigurationMock(),
         hadoop_bin_dir = '/usr/bin',
         default_fs = 'hdfs://ns1',
@@ -999,7 +975,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
                               keytab = UnknownConfigurationMock(),
                               hadoop_bin_dir = '/usr/bin',
                               default_fs = 'hdfs://ns1',
@@ -1017,7 +992,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
                               keytab = UnknownConfigurationMock(),
                               hadoop_bin_dir = '/usr/bin',
                               default_fs = 'hdfs://ns1',
@@ -1035,7 +1009,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
                               keytab = UnknownConfigurationMock(),
                               hadoop_bin_dir = '/usr/bin',
                               default_fs = 'hdfs://ns1',
@@ -1115,7 +1088,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
                               keytab = UnknownConfigurationMock(),
                               hadoop_bin_dir = '/usr/bin',
                               default_fs = 'hdfs://ns1',
@@ -1133,7 +1105,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
                               keytab = UnknownConfigurationMock(),
                               hadoop_bin_dir = '/usr/bin',
                               default_fs = 'hdfs://ns1',
@@ -1151,7 +1122,6 @@ class TestNamenode(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -ns ns1 -getServiceState nn2 | grep active'",
                               keytab = UnknownConfigurationMock(),
                               hadoop_bin_dir = '/usr/bin',
                               default_fs = 'hdfs://ns1',
@@ -1592,7 +1562,7 @@ class TestNamenode(RMFTestCase):
     self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))


-  @patch("hdfs_namenode.is_active_namenode")
+  @patch("hdfs_namenode.check_is_active_namenode")
   @patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
   @patch("utils.get_namenode_states")
   def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):