AMBARI-11704. Mahout service check fails due to bad folder permission for /user/ambari-qa

Sumit Mohanty committed 10 years ago
commit cbcadd2c3e

+ 4 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py

@@ -90,7 +90,10 @@ def namenode(action=None, do_format=True, rolling_restart=False, env=None):
              user = params.hdfs_user)
 
     is_namenode_safe_mode_off = format("hadoop dfsadmin -fs {namenode_address} -safemode get | grep 'Safe mode is OFF'")
-    is_active_namenode_cmd = as_user(format("hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
+    if params.dfs_ha_enabled:
+      is_active_namenode_cmd = as_user(format("hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir})
+    else:
+      is_active_namenode_cmd = None
 
     # During normal operations, if HA is enabled and it is in standby, then stay in current state, otherwise, leave safemode.
     # During Rolling Upgrade, both namenodes must leave safemode.
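
The root cause is visible in the test expectations below: with HA disabled, params.namenode_id is None, so the old guard expanded to "hdfs haadmin -getServiceState None | grep active", a command that can never succeed. Every HdfsResource gated on that only_if was silently skipped, so /user/ambari-qa was never created with the intended permissions, which is what broke the Mahout service check. After the fix, is_active_namenode_cmd is None outside HA, and a None only_if means the resource runs unconditionally. A minimal sketch of that guard semantics, with should_run as a hypothetical stand-in for the real resource_management behavior, not the actual API:

import subprocess

# Hypothetical stand-in for how a resource's only_if guard is evaluated;
# names and structure here are illustrative only.
def should_run(only_if):
    if only_if is None:
        # No guard configured (the non-HA case after this patch):
        # the resource action always executes.
        return True
    # Guard configured (the HA case): execute the resource only when
    # the guard command succeeds, i.e. this NameNode reports active.
    return subprocess.call(only_if, shell=True) == 0

dfs_ha_enabled = False  # illustrative value, mirrors params.dfs_ha_enabled
is_active_namenode_cmd = (
    "hdfs haadmin -getServiceState nn1 | grep active"  # nn1 is a made-up id
    if dfs_ha_enabled else None
)

# Non-HA: no guard, so the /user/ambari-qa HdfsResource proceeds.
assert should_run(is_active_namenode_cmd)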

+ 9 - 9
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py

@@ -101,7 +101,7 @@ class TestNamenode(RMFTestCase):
                              )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
-        only_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if=None,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
@@ -117,7 +117,7 @@ class TestNamenode(RMFTestCase):
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
-        only_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if=None,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
@@ -133,7 +133,7 @@ class TestNamenode(RMFTestCase):
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
-        only_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if=None,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'wasb://abc@c6401.ambari.apache.org',
@@ -217,7 +217,7 @@ class TestNamenode(RMFTestCase):
    )
    self.assertResourceCalled('HdfsResource', '/tmp',
        security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if = None,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -233,7 +233,7 @@ class TestNamenode(RMFTestCase):
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if = None,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -249,7 +249,7 @@ class TestNamenode(RMFTestCase):
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = False,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if = None,
        keytab = UnknownConfigurationMock(),
        hadoop_bin_dir = '/usr/bin',
        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -365,7 +365,7 @@ class TestNamenode(RMFTestCase):
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0777,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'"
+        only_if = None
    )
    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
        security_enabled = True,
@@ -378,11 +378,11 @@ class TestNamenode(RMFTestCase):
        type = 'directory',
        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
        mode = 0770,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'"
+        only_if = None
    )
    self.assertResourceCalled('HdfsResource', None,
        security_enabled = True,
-        only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'",
+        only_if = None,
        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
        hadoop_bin_dir = '/usr/bin',
        kinit_path_local = '/usr/bin/kinit',
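
All nine updated expectations (matching the "+ 9 - 9" stat) follow the same pattern: these test configurations have HA disabled, so the old command string interpolated None as the NameNode id ("... haadmin -getServiceState None ...") and could never succeed, while the patched code produces no guard at all. A hedged sketch of that before/after contrast, using build_only_if as a made-up helper that mirrors the patched logic rather than the actual Ambari code path:

# Made-up helper mirroring the patched hdfs_namenode.py logic (simplified).
def build_only_if(dfs_ha_enabled, namenode_id):
    if dfs_ha_enabled:
        return ("ambari-sudo.sh su hdfs -l -s /bin/bash -c "
                "'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf "
                "haadmin -getServiceState %s | grep active'" % namenode_id)
    # HA disabled: no guard command, hence only_if=None in the assertions.
    return None

# Before the fix the non-HA path yielded '... -getServiceState None ...';
# now it yields no guard at all, matching only_if=None above.
assert build_only_if(dfs_ha_enabled=False, namenode_id=None) is None
assert "getServiceState nn1" in build_only_if(True, "nn1")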