Jelajahi Sumber

AMBARI-9826. Test Kerberos Client failed (Enabling security) after Ambari only upgrade from 1.7.0 to 2.0.0. (vbrodetskyi)

Vitaly Brodetskyi 10 tahun lalu
induk
melakukan
6b3c23535f
34 mengubah file dengan 138 tambahan dan 50 penghapusan
  1. 1 0
      ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
  2. 40 0
      ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py
  3. 3 5
      ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py
  4. 28 2
      ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
  5. 1 1
      ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
  6. 1 1
      ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/status_params.py
  7. 1 1
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
  8. 1 1
      ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
  9. 3 3
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
  10. 2 2
      ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py
  11. 1 1
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
  12. 1 1
      ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
  13. 1 1
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
  14. 2 2
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
  15. 2 2
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
  16. 1 1
      ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
  17. 8 6
      ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
  18. 1 1
      ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/status_params.py
  19. 2 2
      ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py
  20. 1 1
      ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
  21. 1 1
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
  22. 1 1
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
  23. 1 1
      ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
  24. 1 1
      ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
  25. 1 1
      ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
  26. 2 2
      ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  27. 1 1
      ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py
  28. 2 2
      ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py
  29. 1 1
      ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
  30. 1 1
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
  31. 1 1
      ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
  32. 1 1
      ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py
  33. 1 1
      ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py
  34. 22 1
      ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java

+ 1 - 0
ambari-common/src/main/python/resource_management/libraries/functions/__init__.py

@@ -25,6 +25,7 @@ import platform
 from resource_management.libraries.functions.default import *
 from resource_management.libraries.functions.format import *
 from resource_management.libraries.functions.get_kinit_path import *
+from resource_management.libraries.functions.get_kdestroy_path import *
 from resource_management.libraries.functions.get_unique_id_and_date import *
 from resource_management.libraries.functions.check_process_status import *
 from resource_management.libraries.functions.is_empty import *

+ 40 - 0
ambari-common/src/main/python/resource_management/libraries/functions/get_kdestroy_path.py

@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["get_kdestroy_path"]
+import os
+
+def get_kdestroy_path():
+
+  kdestroy_path = ""
+
+  for x in ["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]:
+    if not x:
+      continue
+
+    path = os.path.join(x,"kdestroy")
+
+    if os.path.isfile(path):
+      kdestroy_path = path
+      break
+
+  return kdestroy_path

+ 3 - 5
ambari-common/src/main/python/resource_management/libraries/functions/get_kinit_path.py

@@ -23,13 +23,11 @@ Ambari Agent
 __all__ = ["get_kinit_path"]
 import os
 
-def get_kinit_path(pathes_list):
-  """
-  @param pathes: comma separated list
-  """
+def get_kinit_path():
+
   kinit_path = ""
   
-  for x in pathes_list:
+  for x in ["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]:
     if not x:
       continue
     

+ 28 - 2
ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java

@@ -18,6 +18,10 @@
 
 package org.apache.ambari.server.serveraction.kerberos;
 
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.utils.ShellCommandUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,6 +43,7 @@ import java.util.regex.Pattern;
  * It is assumed that a MIT Kerberos client is installed and that the kdamin shell command is
  * available
  */
+@StaticallyInject
 public class MITKerberosOperationHandler extends KerberosOperationHandler {
 
   /**
@@ -49,6 +54,8 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
 
   private final static Logger LOG = LoggerFactory.getLogger(MITKerberosOperationHandler.class);
 
+  @Inject
+  private static Injector injector;
 
   /**
    * Prepares and creates resources to be used by this KerberosOperationHandler
@@ -87,6 +94,17 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
     setOpen(false);
   }
 
+  /**
+   * Statically initialize the Injector
+   * <p/>
+   * This should only be used for unit tests.
+   *
+   * @param injector the Injector to (manually) statically inject
+   */
+  public static void init(Injector injector) {
+    MITKerberosOperationHandler.injector = injector;
+  }
+
   /**
    * Test to see if the specified principal exists in a previously configured MIT KDC
    * <p/>
@@ -307,15 +325,23 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
           ? null
           : administratorCredentials.getPrincipal();
 
+      String pathToCommand = "";
+
+      Configuration configuration = injector.getInstance(Configuration.class);
+
+      if (configuration.getServerOsFamily().equals("redhat5")) {
+        pathToCommand = "/usr/kerberos/sbin/";
+      }
+
       if ((adminPrincipal == null) || adminPrincipal.isEmpty()) {
         // Set the kdamin interface to be kadmin.local
-        command.add("kadmin.local");
+        command.add(pathToCommand + "kadmin.local");
       } else {
         String adminPassword = administratorCredentials.getPassword();
         String adminKeyTab = administratorCredentials.getKeytab();
 
         // Set the kdamin interface to be kadmin
-        command.add("kadmin");
+        command.add(pathToCommand + "kadmin");
 
         // Add the administrative principal
         command.add("-p");

+ 1 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py

@@ -149,7 +149,7 @@ hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 
 import functools
 # create partial functions with common arguments for every HdfsDirectory call

+ 1 - 1
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/status_params.py

@@ -34,6 +34,6 @@ ams_monitor_pid_dir = config['configurations']['ams-env']['metrics_monitor_pid_d
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 ams_hbase_conf_dir = format("{hbase_conf_dir}")
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 hostname = config['hostname']
 tmp_dir = Script.get_tmp_dir()

+ 1 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py

@@ -92,7 +92,7 @@ hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

+ 1 - 1
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py

@@ -27,7 +27,7 @@ server_pid_file = format('{falcon_pid_dir}/falcon.pid')
 hostname = config['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hadoop_conf_dir = "/etc/hadoop/conf"
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 tmp_dir = Script.get_tmp_dir()
 falcon_conf_dir_prefix = "/etc/falcon"
 falcon_conf_dir = format("{falcon_conf_dir_prefix}/conf")

+ 3 - 3
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py

@@ -119,7 +119,7 @@ master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab
 regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 hbase_user_keytab = config['configurations']['hbase-env']['hbase_user_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 if security_enabled:
   kinit_cmd = format("{kinit_path_local} -kt {hbase_user_keytab} {hbase_principal_name};")
 else:
@@ -140,7 +140,7 @@ hostname = config["hostname"]
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
@@ -238,4 +238,4 @@ elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
 downloaded_custom_connector = format("{exec_tmp_dir}/{jdbc_jar_name}")
 
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")

+ 2 - 2
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py

@@ -28,9 +28,9 @@ hbase_user = config['configurations']['hbase-env']['hbase_user']
 # Security related/required params
 hostname = config['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 tmp_dir = Script.get_tmp_dir()
 
 
 hbase_conf_dir_prefix = "/etc/hbase"
-hbase_conf_dir = format("{hbase_conf_dir_prefix}/conf")
+hbase_conf_dir = format("{hbase_conf_dir_prefix}/conf")

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py

@@ -99,7 +99,7 @@ hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
 exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 #hosts
 hostname = config["hostname"]
 rm_host = default("/clusterHostInfo/rm_host", [])

+ 1 - 1
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py

@@ -36,5 +36,5 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_principal = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hadoop_conf_dir = "/etc/hadoop/conf"
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 tmp_dir = Script.get_tmp_dir()

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py

@@ -101,7 +101,7 @@ def execute(parameters=None, host_name=None):
     smokeuser_keytab = SMOKEUSER_KEYTAB_DEFAULT
     if SMOKEUSER_KEYTAB_KEY in parameters:
       smokeuser_keytab = parameters[SMOKEUSER_KEYTAB_KEY]
-    kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+    kinit_path_local = get_kinit_path()
     kinitcmd=format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser}; ")
   else:
     hive_server_principal = None

+ 2 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py

@@ -101,7 +101,7 @@ def execute(parameters=None, host_name=None):
       # substitute _HOST in kerberos principal with actual fqdn
       webhcat_principal = webhcat_principal.replace('_HOST', host_name)
 
-      kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+      kinit_path_local = get_kinit_path()
       kinit_command = format("{kinit_path_local} -kt {webhcat_keytab} {webhcat_principal}; ")
 
       # kinit so that curl will work with --negotiate
@@ -181,4 +181,4 @@ def execute(parameters=None, host_name=None):
     result_code = RESULT_CODE_CRITICAL
     label = CRITICAL_WEBHCAT_STATUS_MESSAGE.format(webhcat_status)
 
-  return (result_code, [label])
+  return (result_code, [label])

+ 2 - 2
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py

@@ -159,7 +159,7 @@ smokeuser_principal = config['configurations']['cluster-env']['smokeuser_princip
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 hive_server2_keytab = config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
@@ -404,4 +404,4 @@ ranger_driver_curl_source = format("{jdk_location}/{ranger_jdbc_symlink_name}")
 ranger_driver_curl_target = format("{java_share_dir}/{ranger_jdbc_jar_name}")
 
 if security_enabled:
-  hive_principal = hive_server_principal.replace('_HOST',hostname.lower())
+  hive_principal = hive_server_principal.replace('_HOST',hostname.lower())

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py

@@ -41,7 +41,7 @@ else:
 hostname = config['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hadoop_conf_dir = "/etc/hadoop/conf"
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 tmp_dir = Script.get_tmp_dir()
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hive_user = config['configurations']['hive-env']['hive_user']

+ 8 - 6
ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py

@@ -303,6 +303,8 @@ class KerberosScript(Script):
   @staticmethod
   def test_kinit(identity, user=None):
     principal = get_property_value(identity, 'principal')
+    kinit_path_local = functions.get_kinit_path()
+    kdestroy_path_local = functions.get_kdestroy_path()
 
     if principal is not None:
       keytab_file = get_property_value(identity, 'keytab_file')
@@ -311,11 +313,11 @@ class KerberosScript(Script):
 
       # If a test keytab file is available, simply use it
       if (keytab_file is not None) and (os.path.isfile(keytab_file)):
-        command = 'kinit -k -t %s %s' % (keytab_file, principal)
+        command = '%s -k -t %s %s' % (kinit_path_local, keytab_file, principal)
         Execute(command,
           user = user,
         )
-        return shell.checked_call('kdestroy')
+        return shell.checked_call(kdestroy_path_local)
 
       # If base64-encoded test keytab data is available; then decode it, write it to a temporary file
       # use it, and then remove the temporary file
@@ -325,11 +327,11 @@ class KerberosScript(Script):
         os.close(fd)
 
         try:
-          command = 'kinit -k -t %s %s' % (test_keytab_file, principal)
+          command = '%s -k -t %s %s' % (kinit_path_local, test_keytab_file, principal)
           Execute(command,
             user = user,
           )
-          return shell.checked_call('kdestroy')
+          return shell.checked_call(kdestroy_path_local)
         except:
           raise
         finally:
@@ -338,13 +340,13 @@ class KerberosScript(Script):
 
       # If no keytab data is available and a password was supplied, simply use it.
       elif password is not None:
-        process = subprocess.Popen(['kinit', principal], stdin=subprocess.PIPE)
+        process = subprocess.Popen([kinit_path_local, principal], stdin=subprocess.PIPE)
         stdout, stderr = process.communicate(password)
         if process.returncode:
           err_msg = Logger.filter_text("Execution of kinit returned %d. %s" % (process.returncode, stderr))
           raise Fail(err_msg)
         else:
-          return shell.checked_call('kdestroy')
+          return shell.checked_call(kdestroy_path_local)
       else:
         return 0, ''
     else:

+ 1 - 1
ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/status_params.py

@@ -23,7 +23,7 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 hostname = config['hostname']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 

+ 2 - 2
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params.py

@@ -126,7 +126,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 if security_enabled:
   knox_keytab_path = config['configurations']['knox-env']['knox_keytab_path']
   _hostname_lowercase = config['hostname'].lower()
@@ -201,4 +201,4 @@ elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
 downloaded_custom_connector = format("{tmp_dir}/{jdbc_jar_name}")
 
 driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")

+ 1 - 1
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py

@@ -36,5 +36,5 @@ else:
     knox_principal_name = None
 hostname = config['hostname'].lower()
 knox_user = default("/configurations/knox-env/knox_user", "knox")
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 temp_dir = Script.get_tmp_dir()

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py

@@ -80,7 +80,7 @@ def execute(parameters=None, host_name=None):
       else:
         return (RESULT_CODE_UNKNOWN, ['The Oozie keytab and principal are required parameters when security is enabled.'])
 
-      kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+      kinit_path_local = get_kinit_path()
       kinit_command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal}; ")
 
       # kinit

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py

@@ -114,7 +114,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 oozie_heapsize = config['configurations']['oozie-env']['oozie_heapsize']
 oozie_permsize = config['configurations']['oozie-env']['oozie_permsize']
 
-kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = get_kinit_path()
 oozie_service_keytab = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
 oozie_principal = config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
 http_principal = config['configurations']['oozie-site']['oozie.authentication.kerberos.principal']

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py

@@ -26,7 +26,7 @@ oozie_pid_dir = config['configurations']['oozie-env']['oozie_pid_dir']
 pid_file = format("{oozie_pid_dir}/oozie.pid")
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 conf_dir = "/etc/oozie/conf"
 tmp_dir = Script.get_tmp_dir()
 oozie_user = config['configurations']['oozie-env']['oozie_user']

+ 1 - 1
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py

@@ -54,7 +54,7 @@ smokeuser_principal = config['configurations']['cluster-env']['smokeuser_princip
 user_group = config['configurations']['cluster-env']['user_group']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 pig_env_sh_template = config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.

+ 1 - 1
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py

@@ -50,7 +50,7 @@ smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 slider_env_sh_template = config['configurations']['slider-env']['content']
 
 java64_home = config['hostLevelParams']['java_home']

+ 2 - 2
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -131,7 +131,7 @@ if spark_javaopts_properties.find('-Dhdp.version') == -1:
   spark_javaopts_properties = spark_javaopts_properties+ ' -Dhdp.version=' + str(hdp_full_version)
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 spark_kerberos_keytab =  config['configurations']['spark-defaults']['spark.history.kerberos.keytab']
 spark_kerberos_principal =  config['configurations']['spark-defaults']['spark.history.kerberos.principal']
 if security_enabled:
@@ -150,4 +150,4 @@ HdfsDirectory = functools.partial(
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
   bin_dir = hadoop_bin_dir
-)
+)

+ 1 - 1
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params.py

@@ -57,4 +57,4 @@ sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
 sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()

+ 2 - 2
ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/status_params.py

@@ -40,9 +40,9 @@ pid_files = {"logviewer":pid_logviewer,
 # Security related/required params
 hostname = config['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = get_kinit_path()
 tmp_dir = Script.get_tmp_dir()
 conf_dir = "/etc/storm/conf"
 storm_user = config['configurations']['storm-env']['storm_user']
 storm_ui_principal = default('/configurations/storm-env/storm_ui_principal_name', None)
-storm_ui_keytab = default('/configurations/storm-env/storm_ui_keytab', None)
+storm_ui_keytab = default('/configurations/storm-env/storm_ui_keytab', None)

+ 1 - 1
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py

@@ -39,7 +39,7 @@ else:
   hadoop_bin_dir = "/usr/bin"
 hadoop_conf_dir = "/etc/hadoop/conf"
 
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

+ 1 - 1
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py

@@ -93,7 +93,7 @@ smokeuser_principal = config['configurations']['cluster-env']['smokeuser_princip
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 rm_hosts = config['clusterHostInfo']['rm_host']
 rm_host = rm_hosts[0]
 rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]

+ 1 - 1
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py

@@ -39,5 +39,5 @@ mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-hi
 # Security related/required params
 hadoop_conf_dir = "/etc/hadoop/conf"
 hostname = config['hostname']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 security_enabled = config['configurations']['cluster-env']['security_enabled']

+ 1 - 1
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/params.py

@@ -89,7 +89,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
 smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 
 #log4j.properties
 if (('zookeeper-log4j' in config['configurations']) and ('content' in config['configurations']['zookeeper-log4j'])):

+ 1 - 1
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5.2.0/package/scripts/status_params.py

@@ -28,7 +28,7 @@ zk_pid_file = format("{zk_pid_dir}/zookeeper_server.pid")
 # Security related/required params
 hostname = config['hostname']
 security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+kinit_path_local = functions.get_kinit_path()
 tmp_dir = Script.get_tmp_dir()
 config_dir = "/etc/zookeeper/conf"
 zk_user =  config['configurations']['zookeeper-env']['zk_user']

+ 22 - 1
ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java

@@ -18,10 +18,15 @@
 
 package org.apache.ambari.server.serveraction.kerberos;
 
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
 import junit.framework.Assert;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.utils.ShellCommandUtil;
-import org.easymock.EasyMockSupport;
 import org.easymock.IAnswer;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
@@ -31,6 +36,7 @@ import java.util.Map;
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
+import static org.mockito.Mockito.mock;
 
 
 public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTest {
@@ -39,6 +45,14 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
   private static final String DEFAULT_ADMIN_PASSWORD = "hadoop";
   private static final String DEFAULT_REALM = "EXAMPLE.COM";
 
+  private static Injector injector;
+
+  @BeforeClass
+  public static void beforeClass() throws AmbariException {
+    injector = Guice.createInjector(new MockModule());
+    MITKerberosOperationHandler.init(injector);
+  }
+
   private static final Map<String, String> KERBEROS_ENV_MAP = new HashMap<String, String>() {
     {
       put(MITKerberosOperationHandler.KERBEROS_ENV_ENCRYPTION_TYPES, null);
@@ -426,4 +440,11 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
     handler.close();
   }
 
+  public static class MockModule extends AbstractModule {
+    @Override
+    protected void configure() {
+      bind(Clusters.class).toInstance(mock(Clusters.class));
+    }
+  }
+
 }