
AMBARI-6496. Ambari fails to start services because it still tries to kinit for hdfs_user when hdfs_principal_name is customized. (Alejandro Fernandez via swagle)

Siddharth Wagle 11 years ago
Parent
Commit
66ce3ed8ba
28 changed files with 63 additions and 23 deletions
  1. +1 -1    ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
  2. +1 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
  3. +1 -1    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
  4. +1 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
  5. +19 -8   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
  6. +1 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
  7. +1 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
  8. +1 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
  9. +1 -0    ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
 10. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
 11. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
 12. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
 13. +19 -8   ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
 14. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
 15. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
 16. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
 17. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
 18. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
 19. +1 -1    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
 20. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
 21. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
 22. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
 23. +1 -0    ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
 24. +2 -1    ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
 25. +0 -0    ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
 26. +2 -1    ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
 27. +0 -0    ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
 28. +0 -0    ambari-server/src/test/python/stacks/2.1/configs/secured.json
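
The pattern repeated throughout the changes below is that, on a secured cluster, kinit must be handed the Kerberos principal (hdfs_principal_name) rather than the local UNIX account (hdfs_user), even though the resulting Hadoop command still runs as that account. A minimal standalone sketch of the before/after command (plain Python with hypothetical values; in the real scripts the string is passed to Execute(format(...), user=params.hdfs_user)):

    # Standalone sketch of the corrected kinit invocation; all values are hypothetical.
    kinit_path_local = "/usr/bin/kinit"
    hdfs_user = "hdfs"                                    # local account the command runs as
    hdfs_user_keytab = "/etc/security/keytabs/hdfs.headless.keytab"
    hdfs_principal_name = "hdfs-mycluster@EXAMPLE.COM"    # customized principal that broke the old code

    # Pre-fix: kinit was handed the UNIX user name, which is not a valid principal
    # once hdfs_principal_name has been customized away from plain "hdfs".
    old_cmd = "{0} -kt {1} {2}".format(kinit_path_local, hdfs_user_keytab, hdfs_user)

    # Post-fix: kinit is handed the principal name, so the ticket matches the keytab.
    new_cmd = "{0} -kt {1} {2}".format(kinit_path_local, hdfs_user_keytab, hdfs_principal_name)

    print(old_cmd)
    print(new_cmd)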

+ 1 - 1
ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py

@@ -90,7 +90,7 @@ class HdfsDirectoryProvider(Provider):
         chown_commands.append(format("hadoop fs -chown {recursive} {chown} {chown_dirs_str}"))
 
     if secured:
-        Execute(format("{kinit_path} -kt {keytab_file} {hdp_hdfs_user}"),
+        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
                 user=hdp_hdfs_user)
     #create all directories in one 'mkdir' call
     dir_list_str = ' '.join(directories_list)

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py

@@ -100,6 +100,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
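
Each params.py touched in this commit gains the same one-line lookup. A small self-contained illustration of what that line reads (the config dict here is a hand-built stand-in with hypothetical values; in a real Ambari script it comes from Script.get_config(), and the 2.1.GlusterFS stacks later in this diff read from the 'global' section instead of 'hadoop-env'):

    # Hand-built stand-in for the cluster configuration; values are hypothetical.
    config = {
        'configurations': {
            'hadoop-env': {
                'hdfs_user': 'hdfs',
                'hdfs_user_keytab': '/etc/security/keytabs/hdfs.headless.keytab',
                # Often defaults to the user name, but admins may set e.g.
                # 'hdfs-mycluster@EXAMPLE.COM' -- the customized case this commit fixes.
                'hdfs_principal_name': 'hdfs-mycluster@EXAMPLE.COM',
            }
        }
    }

    hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
    hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
    hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

    print(hdfs_user, hdfs_principal_name)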

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py

@@ -39,7 +39,7 @@ def namenode(action=None, do_format=True):
 
     namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
     if params.security_enabled:
-      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
               user = params.hdfs_user)
     Execute(namenode_safe_mode_off,
             tries=40,

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -128,6 +128,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 19 - 8
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py

@@ -48,14 +48,25 @@ def hcat_service_check():
             path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
             logoutput=True)
 
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  security_enabled=params.security_enabled,
-                  kinit_path_local=params.kinit_path_local,
-                  keytab=params.hdfs_user_keytab
-    )
+    if params.security_enabled:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab,
+                    principal=params.hdfs_principal_name
+      )
+    else:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab
+      )
 
     cleanup_cmd = format("{kinit_cmd}sh {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
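
The duplicated if/else above exists only to add the principal keyword in the secured case (the 2.0.6 copy of this script below does the same). A sketch of an equivalent, more compact form, assuming the same params and test_cmd names as in the script above; this is not what the commit ships:

    # Sketch only: build the common keyword arguments once and add `principal`
    # only when security is enabled.
    hadoop_kwargs = dict(
        user=params.hdfs_user,
        logoutput=True,
        conf_dir=params.hadoop_conf_dir,
        security_enabled=params.security_enabled,
        kinit_path_local=params.kinit_path_local,
        keytab=params.hdfs_user_keytab,
    )
    if params.security_enabled:
        hadoop_kwargs['principal'] = params.hdfs_principal_name

    ExecuteHadoop(test_cmd, **hadoop_kwargs)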
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py

@@ -155,6 +155,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py

@@ -63,6 +63,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py

@@ -105,6 +105,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py

@@ -64,6 +64,7 @@ security_param = "true" if security_enabled else "false"
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -107,6 +107,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py

@@ -52,7 +52,7 @@ def namenode(action=None, do_format=True):
     namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
 
     if params.security_enabled:
-      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
               user = params.hdfs_user)
     Execute(namenode_safe_mode_off,
             tries=40,

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -94,6 +94,7 @@ hive_user = config['configurations']['hive-env']['hive_user']
 smoke_user =  config['configurations']['hadoop-env']['smokeuser']
 mapred_user = config['configurations']['mapred-env']['mapred_user']
 hdfs_user = status_params.hdfs_user
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']

+ 19 - 8
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py

@@ -47,14 +47,25 @@ def hcat_service_check():
             path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
             logoutput=True)
 
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  security_enabled=params.security_enabled,
-                  kinit_path_local=params.kinit_path_local,
-                  keytab=params.hdfs_user_keytab
-    )
+    if params.security_enabled:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab,
+                    principal=params.hdfs_principal_name
+      )
+    else:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab
+      )
 
     cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
 

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py

@@ -39,7 +39,7 @@ def install_tez_jars():
     params.HdfsDirectory(None, action="create")
 
     if params.security_enabled:
-      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
     else:
       kinit_if_needed = ""
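
The kinit_if_needed prefix is later prepended to the shell command that copies the Tez jars into HDFS; that code is unchanged and outside this diff. A hypothetical downstream use, assuming the surrounding script's params and format helpers, to show why only the principal inside the prefix needed to change:

    # Hypothetical use of the prefix built above (the actual copy command in
    # install_jars.py is not shown in this diff). Only the kinit prefix changes;
    # the hadoop command and the executing user stay the same.
    Execute(format("{kinit_if_needed} hadoop fs -ls /apps"),  # '/apps' is an arbitrary example path
            user=params.hdfs_user)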
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -168,6 +168,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 # Tez libraries

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -97,6 +97,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -28,6 +28,7 @@ tmp_dir = Script.get_tmp_dir()
 pig_conf_dir = "/etc/pig/conf"
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['hadoop-env']['smokeuser']
 user_group = config['configurations']['hadoop-env']['user_group']
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py

@@ -69,6 +69,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 security_param = "true" if security_enabled else "false"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py

@@ -74,7 +74,7 @@ def webhcat():
   )
 
   if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
+    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
   else:
     kinit_if_needed = ""
 

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -123,6 +123,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py

@@ -56,6 +56,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
 hdfs_user = config['configurations']['global']['hdfs_user']
+hdfs_principal_name = config['configurations']['global']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py

@@ -117,6 +117,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
 hdfs_user = config['configurations']['global']['hdfs_user']
+hdfs_principal_name = config['configurations']['global']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 1 - 0
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py

@@ -57,6 +57,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

+ 2 - 1
ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py

@@ -86,7 +86,8 @@ class TestServiceCheck(RMFTestCase):
                               conf_dir = '/etc/hadoop/conf',
                               keytab='/etc/security/keytabs/hdfs.headless.keytab',
                               kinit_path_local='/usr/bin/kinit',
-                              security_enabled=True
+                              security_enabled=True,
+                              principal='hdfs'
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,

File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/1.3.2/configs/secured.json


+ 2 - 1
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py

@@ -86,7 +86,8 @@ class TestServiceCheck(RMFTestCase):
                         conf_dir = '/etc/hadoop/conf',
                         keytab='/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local='/usr/bin/kinit',
-                        security_enabled=True
+                        security_enabled=True,
+                        principal='hdfs'
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;  /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,

File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.0.6/configs/secured.json


File diff suppressed because it is too large
+ 0 - 0
ambari-server/src/test/python/stacks/2.1/configs/secured.json


Some files were not shown because too many files changed in this diff