AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)

Siddharth Wagle 10 years ago
parent
commit
d256ab8f94
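
My reading of the change below (hedged, from the diffs alone): hadoop-env.sh previously got HADOOP_SECURE_DN_USER from a Jinja conditional on {{dn_proc_user}}, and utils.py re-rendered the whole file via hdfs.setup_hadoop_env(replace=True) when Ambari started the datanode as root. A datanode started as root directly from the command line never went through that path, so the on-disk hadoop-env.sh could carry the wrong value, which is presumably the failure in the ticket title. The patch drops both the template conditional and the setup_hadoop_env helper, and instead rewrites the single export line in place with sed on every Ambari-initiated datanode start, skipped (not_if) when the datanode is already running.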

+ 2 - 1
ambari-common/src/main/python/resource_management/core/source.py

@@ -114,7 +114,8 @@ else:
       self.context = variables.copy() if variables else {}
       if not hasattr(self, 'template_env'):
         self.template_env = JinjaEnvironment(loader=TemplateLoader(self.env),
-                                        autoescape=False, undefined=StrictUndefined, trim_blocks=True)
+                                        autoescape=False, undefined=StrictUndefined,
+                                        trim_blocks=True)
         
       self.template = self.template_env.get_template(self.name)     
     

+ 0 - 22
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py

@@ -81,25 +81,3 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)
-
-def setup_hadoop_env(replace=False):
-  import params
-
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template),
-       replace=replace
-  )

+ 14 - 4
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py

@@ -103,11 +103,22 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
     pass
   pass
 
+  service_is_up = check_process if action == "start" else None
+
   # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
   # in secure mode.
-  if name == 'datanode' and user == 'root':
-    params.dn_proc_user = 'root'
-    hdfs.setup_hadoop_env(replace=True)
+  set_secure_dn_user_cmd="sed -i 's/export HADOOP_SECURE_DN_USER=.*/export " \
+                "HADOOP_SECURE_DN_USER=\"{0}\"/' {1}"
+  if name == 'datanode' and action == 'start':
+    if user == 'root':
+      secure_dn_user = params.hdfs_user
+    else:
+      secure_dn_user = ""
+    pass
+
+    Execute(set_secure_dn_user_cmd.format(secure_dn_user,
+              os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh')),
+            not_if=service_is_up)
   pass
 
   hadoop_env_exports_str = ''
@@ -121,7 +132,6 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
 
   daemon_cmd = format("{ulimit_cmd} su -s /bin/bash - {user} -c '{cmd} {action} {name}'")
 
-  service_is_up = check_process if action == "start" else None
   #remove pid file from dead process
   File(pid_file,
        action="delete",

+ 1 - 6
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml

@@ -135,12 +135,7 @@ export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 
-HDFS_DN_PROC_USER={{dn_proc_user}}
-if [ $HDFS_DN_PROC_USER == "root" ]; then
-  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
-else
-  export HADOOP_SECURE_DN_USER=""
-fi
+export HADOOP_SECURE_DN_USER=""
     </value>
   </property>
   

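Since the template above now always renders the empty export, the sed command from utils.py is the only thing that ever sets the value. A rough equivalence check in plain Python, with re.sub standing in for sed (treating the two regex dialects as interchangeable is an assumption, though it holds for this simple pattern):

    import re

    env_line = 'export HADOOP_SECURE_DN_USER=""'  # what the template now renders

    def set_secure_dn_user(text, user):
        # Mirrors the sed expression: rewrite the whole export line.
        return re.sub(r'export HADOOP_SECURE_DN_USER=.*',
                      'export HADOOP_SECURE_DN_USER="%s"' % user, text)

    print(set_secure_dn_user(env_line, 'hdfs'))  # root start: drops to hdfs
    print(set_secure_dn_user(env_line, ''))      # non-root start: stays cleared
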
+ 10 - 54
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py

@@ -49,6 +49,9 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"\"/' /etc/hadoop/conf/hadoop-env.sh",
+                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -109,19 +112,8 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
+    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"hdfs\"/' /etc/hadoop/conf/hadoop-env.sh",
+                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
     )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
@@ -153,19 +145,8 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
+    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"hdfs\"/' /etc/hadoop/conf/hadoop-env.sh",
+                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
     )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
@@ -200,6 +181,9 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"\"/' /etc/hadoop/conf/hadoop-env.sh",
+                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -224,20 +208,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -272,20 +242,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',