AMBARI-4633. Security_enabled was not sent when adding service. Add service failed (aonishuk)

Andrew Onischuk, 11 years ago
Parent commit: d3ea89dfc5
29 changed files with 83 additions and 35 deletions
  1. +1 -0   ambari-agent/src/main/python/resource_management/libraries/functions/__init__.py
  2. +28 -0  ambari-agent/src/main/python/resource_management/libraries/functions/is_empty.py
  3. +2 -1   ambari-server/src/main/resources/custom_actions/ambari_hdfs_rebalancer.py
  4. +2 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
  5. +2 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
  6. +2 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
  7. +2 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
  8. +2 -1   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
  9. +2 -5   ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/scripts/functions.py
  10. +2 -1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/scripts/params.py
  11. +2 -1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
  12. +2 -1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py
  13. +2 -1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
  14. +2 -1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
  15. +2 -1  ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py
  16. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  17. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  18. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  19. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  20. +2 -5  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/functions.py
  21. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
  22. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
  23. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
  24. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
  25. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
  26. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  27. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
  28. +2 -1  ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
  29. +2 -1  ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py

+ 1 - 0
ambari-agent/src/main/python/resource_management/libraries/functions/__init__.py

@@ -25,3 +25,4 @@ from resource_management.libraries.functions.format import *
 from resource_management.libraries.functions.get_kinit_path import *
 from resource_management.libraries.functions.get_unique_id_and_date import *
 from resource_management.libraries.functions.check_process_status import *
+from resource_management.libraries.functions.is_empty import *

+ 28 - 0
ambari-agent/src/main/python/resource_management/libraries/functions/is_empty.py

@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+from resource_management.libraries.script.config_dictionary import UnknownConfiguration
+
+def is_empty(var):
+  """
+  Check whether a configuration value expected from the server is missing (an UnknownConfiguration placeholder).
+  """
+  return isinstance(var, UnknownConfiguration)

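For context, here is a minimal usage sketch of the new helper; it is not part of the commit. It assumes UnknownConfiguration takes the missing key's name as its constructor argument and uses a hypothetical stand-in for a core-site property that the server never sent. The guard below is the pattern repeated across the params.py files in this change: is_empty() short-circuits the comparison, so security_enabled simply stays False when the configuration is missing.

from resource_management.libraries.script.config_dictionary import UnknownConfiguration
from resource_management.libraries.functions.is_empty import is_empty

# Hypothetical stand-in for config['configurations']['core-site'][...] when
# core-site was not delivered with the command (constructor argument assumed).
_authentication = UnknownConfiguration('hadoop.security.authentication')

# The guarded pattern introduced by this commit: is_empty() returns True for
# the placeholder, the equality check never runs, and the flag defaults to False.
security_enabled = (not is_empty(_authentication) and _authentication == 'kerberos')

print(security_enabled)  # False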
+ 2 - 1
ambari-server/src/main/resources/custom_actions/ambari_hdfs_rebalancer.py

@@ -30,7 +30,8 @@ class HdfsRebalance(Script):
     hdfs_user = config['configurations']['global']['hdfs_user']
     conf_dir = "/etc/hadoop/conf"
 
-    security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+    _authentication = config['configurations']['core-site']['hadoop.security.authentication']
+    security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
     threshold = config['commandParams']['threshold']
 

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py

@@ -30,7 +30,8 @@ jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is a
 jce_location = config['hostLevelParams']['jdk_location']
 jdk_location = config['hostLevelParams']['jdk_location']
 #security params
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_secondary_namenode_keytab_file =  config['configurations']['hdfs-site']['fs.secondary.namenode.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py

@@ -35,7 +35,8 @@ hbase_drain_only = config['commandParams']['mark_draining_only']
 
 hbase_user = config['configurations']['global']['hbase_user']
 smokeuser = config['configurations']['global']['smokeuser']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 user_group = config['configurations']['global']['user_group']
 
 # this is "hadoop-metrics2-hbase.properties" for 2.x stacks

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -29,7 +29,8 @@ else:
   ulimit_cmd = "ulimit -c unlimited; "
 
 #security params
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_secondary_namenode_keytab_file =  config['configurations']['hdfs-site']['dfs.secondary.namenode.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py

@@ -54,7 +54,8 @@ smoke_test_sql = "/tmp/hiveserver2.sql"
 smoke_test_path = "/tmp/hiveserver2Smoke.sh"
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py

@@ -45,7 +45,8 @@ update_exclude_file_only = config['commandParams']['update_exclude_file_only']
 
 hadoop_jar_location = "/usr/lib/hadoop/"
 smokeuser = config['configurations']['global']['smokeuser']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 

+ 2 - 5
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/scripts/functions.py

@@ -19,13 +19,10 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management.libraries.script.config_dictionary import UnknownConfiguration
+from resource_management import *
 
 def get_port_from_url(address):
   if not is_empty(address):
     return address.split(':')[-1]
   else:
-    return address
-  
-def is_empty(var):
-  return isinstance(var, UnknownConfiguration)
+    return address

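The same change lands in both NAGIOS functions.py files: the locally defined is_empty() is dropped, and the shared helper now reaches these scripts through the wildcard resource_management import. Below is a hedged, self-contained sketch (not part of the commit) of how get_port_from_url behaves, using a hypothetical Nagios address and the same assumed UnknownConfiguration constructor as above.

from resource_management.libraries.script.config_dictionary import UnknownConfiguration
from resource_management.libraries.functions.is_empty import is_empty

def get_port_from_url(address):
  # Mirrors the script above: split the port off a concrete address,
  # pass an unknown/missing value straight through instead of failing.
  if not is_empty(address):
    return address.split(':')[-1]
  return address

print(get_port_from_url('c6401.ambari.apache.org:50070'))  # '50070' (hypothetical host)
missing = UnknownConfiguration('nagios.server.url')         # hypothetical missing key
print(get_port_from_url(missing) is missing)                # True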
+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/NAGIOS/package/scripts/params.py

@@ -78,7 +78,8 @@ clientPort = config['configurations']['global']['clientPort'] #ZK
 
 
 java64_home = config['hostLevelParams']['java_home']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 nagios_keytab_path = default("nagios_keytab_path", "/etc/security/keytabs/nagios.service.keytab")
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py

@@ -22,7 +22,8 @@ hadoop_jar_location = "/usr/lib/hadoop/"
 ext_js_path = "/usr/share/HDP-oozie/ext.zip"
 oozie_libext_dir = "/usr/lib/oozie/libext"
 lzo_enabled = config['configurations']['global']['lzo_enabled']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/package/scripts/params.py

@@ -30,7 +30,8 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user = config['configurations']['global']['hdfs_user']
 smokeuser = config['configurations']['global']['smokeuser']
 user_group = config['configurations']['global']['user_group']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py

@@ -20,7 +20,8 @@ from resource_management import *
 
 config = Script.get_config()
 
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smokeuser = config['configurations']['global']['smokeuser']
 user_group = config['configurations']['global']['user_group']
 

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py

@@ -48,7 +48,8 @@ webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 webhcat_apps_dir = "/apps/webhcat"
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 #hdfs directories

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py

@@ -64,7 +64,8 @@ keytab_path = "/etc/security/keytabs"
 zk_keytab_path = format("{keytab_path}/zk.service.keytab")
 zk_server_jaas_file = format("{config_dir}/zookeeper_jaas.conf")
 zk_client_jaas_file = format("{config_dir}/zookeeper_client_jaas.conf")
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -30,7 +30,8 @@ jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is a
 jce_location = config['hostLevelParams']['jdk_location']
 jdk_location = config['hostLevelParams']['jdk_location']
 #security params
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_secondary_namenode_keytab_file =  config['configurations']['hdfs-site']['fs.secondary.namenode.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -35,7 +35,8 @@ hbase_drain_only = config['commandParams']['mark_draining_only']
 
 hbase_user = config['configurations']['global']['hbase_user']
 smokeuser = config['configurations']['global']['smokeuser']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 user_group = config['configurations']['global']['user_group']
 
 # this is "hadoop-metrics.properties" for 1.x stacks

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -29,7 +29,8 @@ else:
   ulimit_cmd = "ulimit -c unlimited; "
 
 #security params
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
 dfs_secondary_namenode_keytab_file =  config['configurations']['hdfs-site']['dfs.secondary.namenode.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -54,7 +54,8 @@ smoke_test_sql = "/tmp/hiveserver2.sql"
 smoke_test_path = "/tmp/hiveserver2Smoke.sh"
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']

+ 2 - 5
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/functions.py

@@ -19,13 +19,10 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management.libraries.script.config_dictionary import UnknownConfiguration
+from resource_management import *
 
 def get_port_from_url(address):
   if not is_empty(address):
     return address.split(':')[-1]
   else:
-    return address
-  
-def is_empty(var):
-  return isinstance(var, UnknownConfiguration)
+    return address

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py

@@ -72,7 +72,8 @@ clientPort = config['configurations']['global']['clientPort'] #ZK
 
 
 java64_home = config['hostLevelParams']['java_home']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 nagios_keytab_path = default("nagios_keytab_path", "/etc/security/keytabs/nagios.service.keytab")
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -42,7 +42,8 @@ hadoop_jar_location = "/usr/lib/hadoop/"
 ext_js_path = "/usr/share/HDP-oozie/ext-2.2.zip"
 oozie_libext_dir = "/usr/lib/oozie/libext"
 lzo_enabled = config['configurations']['global']['lzo_enabled']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -30,7 +30,8 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user = config['configurations']['global']['hdfs_user']
 smokeuser = config['configurations']['global']['smokeuser']
 user_group = config['configurations']['global']['user_group']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -21,7 +21,8 @@ from resource_management import *
 
 config = Script.get_config()
 
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smokeuser = config['configurations']['global']['smokeuser']
 user_group = config['configurations']['global']['user_group']
 

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py

@@ -53,7 +53,8 @@ webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 webhcat_apps_dir = "/apps/webhcat"
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 hcat_hdfs_user_dir = format("/user/{hcat_user}")

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -33,7 +33,8 @@ yarn_user = status_params.yarn_user
 hdfs_user = config['configurations']['global']['hdfs_user']
 
 smokeuser = config['configurations']['global']['smokeuser']
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -64,7 +64,8 @@ keytab_path = "/etc/security/keytabs"
 zk_keytab_path = format("{keytab_path}/zk.service.keytab")
 zk_server_jaas_file = format("{config_dir}/zookeeper_jaas.conf")
 zk_client_jaas_file = format("{config_dir}/zookeeper_client_jaas.conf")
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']

+ 2 - 1
ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py

@@ -36,7 +36,8 @@ falcon_host = config['clusterHostInfo']['falcon_server_hosts'][0]
 falcon_port = config['configurations']['global']['falcon_port']
 
 #for create_hdfs_directory
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']

+ 2 - 1
ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/package/scripts/params.py

@@ -54,7 +54,8 @@ smoke_test_sql = "/tmp/hiveserver2.sql"
 smoke_test_path = "/tmp/hiveserver2Smoke.sh"
 smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 
-security_enabled = (config['configurations']['core-site']['hadoop.security.authentication'] == 'kerberos')
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']