
AMBARI-11176. Hive Upgrade Pack For HDP-2.2 to HDP-2.3 (ncole)

Nate Cole, 10 years ago
Commit c23de40d31
19 changed files with 437 additions and 63 deletions
  1. +3 -1    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
  2. +33 -4   ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
  3. +55 -0   ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
  4. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-env.xml
  5. +3 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
  6. +8 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
  7. +10 -6   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
  8. +27 -1   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
  9. +14 -10  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
 10. +13 -7   ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
 11. +1 -1    ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
 12. +17 -10  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
 13. +6 -0    ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
 14. +76 -0   ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
 15. +148 -2  ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
 16. +5 -5    ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
 17. +10 -6   ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
 18. +3 -3    ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
 19. +4 -4    ambari-server/src/test/python/stacks/utils/RMFTestCase.py

+ 3 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java

@@ -1019,15 +1019,17 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
         ConfigureTask ct = (ConfigureTask) task;
         Map<String, String> configProperties = ct.getConfigurationProperties(cluster);
         List<ConfigureTask.Transfer> transfers = ct.getTransfers();
+        List<ConfigureTask.Replace> replacements = ct.getReplacements();

         // if the properties are empty it means that the conditions in the
         // task did not pass;
-        if (configProperties.isEmpty() && transfers.isEmpty()) {
+        if (configProperties.isEmpty() && transfers.isEmpty() && replacements.isEmpty()) {
           stageText = "No conditions were met for this configuration task.";
           itemDetail = stageText;
         } else {
           commandParams.putAll(configProperties);
           commandParams.put(ConfigureTask.PARAMETER_TRANSFERS, s_gson.toJson(transfers));
+          commandParams.put(ConfigureTask.PARAMETER_REPLACEMENTS, s_gson.toJson(replacements));

           // extract the config type, key and value to use to build the
           // summary and detail

+ 33 - 4
ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java

@@ -164,18 +164,26 @@ public class ConfigureAction extends AbstractServerAction {
         transferJson, new TypeToken<List<ConfigureTask.Transfer>>(){}.getType());
     }

+    List<ConfigureTask.Replace> replacements = Collections.emptyList();
+    String replaceJson = commandParameters.get(ConfigureTask.PARAMETER_REPLACEMENTS);
+    if (null != replaceJson) {
+      replacements = m_gson.fromJson(
+          replaceJson, new TypeToken<List<ConfigureTask.Replace>>(){}.getType());
+    }
+
     // if the two required properties are null and no transfer properties, then
     // assume that no conditions were met and let the action complete
-    if (null == configType && null == key && transfers.isEmpty()) {
+    if (null == configType && null == key && transfers.isEmpty() && replacements.isEmpty()) {
       return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", "",
           "Skipping configuration task");
     }

     // if only 1 of the required properties was null and no transfer properties,
     // then something went wrong
-    if (null == clusterName || null == configType || (null == key && transfers.isEmpty())) {
-      String message = "cluster={0}, type={1}, key={2}, transfers={3}";
-      message = MessageFormat.format(message, clusterName, configType, key, transfers);
+    if (null == clusterName || null == configType ||
+        (null == key && transfers.isEmpty() && replacements.isEmpty())) {
+      String message = "cluster={0}, type={1}, key={2}, transfers={3}, replacements={4}";
+      message = MessageFormat.format(message, clusterName, configType, key, transfers, replacements);
       return createCommandReport(0, HostRoleStatus.FAILED, "{}", "", message);
     }

@@ -337,6 +345,27 @@ public class ConfigureAction extends AbstractServerAction {
       outputBuffer.append(MessageFormat.format("{0}/{1} changed to {2}\n", configType, key, value));
     }

+
+    // !!! string replacements happen only on the new values.
+    for (ConfigureTask.Replace replacement : replacements) {
+      if (newValues.containsKey(replacement.key)) {
+        String toReplace = newValues.get(replacement.key);
+
+        if (!toReplace.contains(replacement.find)) {
+          outputBuffer.append(MessageFormat.format("String {0} was not found in {1}/{2}\n",
+              replacement.find, configType, replacement.key));
+        } else {
+          String replaced = StringUtils.replace(toReplace, replacement.find, replacement.replaceWith);
+
+          newValues.put(replacement.key, replaced);
+
+          outputBuffer.append(MessageFormat.format("Replaced {0}/{1} containing \"{2}\" with \"{3}\"\n",
+            configType, replacement.key, replacement.find, replacement.replaceWith));
+        }
+      }
+    }
+
+
     // !!! check to see if we're going to a new stack and double check the
     // configs are for the target.  Then simply update the new properties instead
     // of creating a whole new history record since it was already done

+ 55 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java

@@ -101,6 +101,12 @@ public class ConfigureTask extends ServerSideActionTask {
    */
   public static final String PARAMETER_TRANSFERS = "configure-task-transfers";

+  /**
+   * Replacements can be several per task, so they're passed in as a json-ified list of
+   * objects.
+   */
+  public static final String PARAMETER_REPLACEMENTS = "configure-task-replacements";
+
   /**
    * Constructor.
    *
@@ -130,6 +136,9 @@ public class ConfigureTask extends ServerSideActionTask {
   @XmlElement(name = "transfer")
   private List<Transfer> transfers;

+  @XmlElement(name="replace")
+  private List<Replace> replacements;
+
   @Override
   public Type getType() {
     return type;
@@ -262,6 +271,51 @@ public class ConfigureTask extends ServerSideActionTask {
     return list;
   }

+  /**
+   * Used to replace strings in a key with other strings.  More complex
+   * scenarios will be possible with regex (when needed)
+   */
+  @XmlAccessorType(XmlAccessType.FIELD)
+  @XmlType(name = "replace")
+  public static class Replace {
+    /**
+     * The key name
+     */
+    @XmlAttribute(name="key")
+    public String key;
+
+    /**
+     * The string to find
+     */
+    @XmlAttribute(name="find")
+    public String find;
+
+    /**
+     * The string to replace
+     */
+    @XmlAttribute(name="replace-with")
+    public String replaceWith;
+  }
+
+  /**
+   * @return the replacement tokens, never {@code null}
+   */
+  public List<Replace> getReplacements() {
+    if (null == replacements) {
+      return Collections.emptyList();
+    }
+
+    List<Replace> list = new ArrayList<Replace>();
+    for (Replace r : replacements) {
+      if (null == r.key || null == r.find || null == r.replaceWith) {
+        continue;
+      }
+      list.add(r);
+    }
+
+    return list;
+  }
+
   /**
    * Gets a map containing the following properties pertaining to the
    * configuration value to change:
@@ -341,4 +395,5 @@ public class ConfigureTask extends ServerSideActionTask {

     return config.getProperties().get(propertyKey);
   }
+
 }
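Worth noting: getReplacements() silently drops any <replace> entry missing key, find, or replace-with, so the server action only ever sees complete triples. On the wire the surviving entries travel as the configure-task-replacements command parameter, Gson-serialized from the fields above; for the WebHCat change in this patch the payload would be roughly [{"key": "content", "find": "export HADOOP_HOME={{hadoop_home}}", "replaceWith": "export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}"}].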

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-env.xml

@@ -47,7 +47,7 @@ CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
 #HCAT_PREFIX=hive_prefix

 # Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME={{hadoop_home}}
+export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
     </value>
   </property>

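The new default leans on standard shell parameter expansion: ${HADOOP_HOME:-{{hadoop_home}}} keeps a HADOOP_HOME the launcher already exported (as the updated webhcat_service.py below now does) and falls back to the rendered template value only when the variable is unset or empty. A minimal bash illustration, with hypothetical paths:

    unset HADOOP_HOME
    echo "${HADOOP_HOME:-/usr/hdp/current/hadoop-client}"    # prints the fallback

    export HADOOP_HOME=/usr/hdp/2.3.0.0-1234/hadoop
    echo "${HADOOP_HOME:-/usr/hdp/current/hadoop-client}"    # prints the exported value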

+ 3 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh

@@ -19,5 +19,7 @@
 # under the License.
 #
 #
-HIVE_CONF_DIR=$4 hive --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
+HIVE_BIN=${HIVE_BIN:-"hive"}
+
+HIVE_CONF_DIR=$4 $HIVE_BIN --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
 echo $!|cat>$3
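The launcher now honors an optional HIVE_BIN override and defaults to the hive found on the PATH, so existing callers keep working unchanged. A sketch of both invocations (the versioned binary mirrors what hive_service.py below exports during a rolling restart):

    # default: picks up "hive" from the PATH
    /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive

    # rolling upgrade: an explicit binary supplied via the environment
    HIVE_BIN=/usr/hdp/current/hive-server2/bin/hive \
      /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive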

+ 8 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh

@@ -37,10 +37,17 @@ else
 fi

 export no_proxy=$ttonhost
-cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'    $ttonurl/status 2>&1"
+cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'  $ttonurl/status 2>&1"
 retVal=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
 httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`

+# try again for 2.3 username requirement
+if [[ "$httpExitCode" == "500" ]] ; then
+  cmd="${kinitcmd}curl --negotiate -u : -s -w 'http_code <%{http_code}>'  $ttonurl/status?user.name=$smoke_test_user 2>&1"
+  retVal=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd"`
+  httpExitCode=`echo $retVal |sed 's/.*http_code <\([0-9]*\)>.*/\1/'`
+fi
+
 if [[ "$httpExitCode" -ne "200" ]] ; then
 if [[ "$httpExitCode" -ne "200" ]] ; then
   echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
   echo "Templeton Smoke Test (status cmd): Failed. : $retVal"
   export TEMPLETON_EXIT_CODE=1
   export TEMPLETON_EXIT_CODE=1
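The retry exists because WebHCat on HDP 2.3 can answer 500 to an anonymous status probe; the second attempt simply appends the user.name query parameter. Stripped of the kinit and sudo plumbing, the two probes reduce to:

    curl --negotiate -u : -s -w 'http_code <%{http_code}>' "$ttonurl/status"
    curl --negotiate -u : -s -w 'http_code <%{http_code}>' "$ttonurl/status?user.name=$smoke_test_user"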

+ 10 - 6
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py

@@ -18,6 +18,7 @@ limitations under the License.

 """

+import os
 from resource_management import *
 from resource_management.libraries.functions import get_unique_id_and_date
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -51,11 +52,16 @@ def hcat_service_check():

     prepare_cmd = format("{kinit_cmd}env JAVA_HOME={java64_home} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} prepare")

+    exec_path = params.execute_path
+    if params.version and params.stack_name:
+      upgrade_hive_bin = format("/usr/hdp/{version}/hive/bin")
+      exec_path = os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + upgrade_hive_bin
+
     Execute(prepare_cmd,
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', exec_path],
             logoutput=True)

     if params.security_enabled:
@@ -67,8 +73,7 @@ def hcat_service_check():
                     kinit_path_local=params.kinit_path_local,
                     keytab=params.hdfs_user_keytab,
                     principal=params.hdfs_principal_name,
-                    bin_dir=params.execute_path
-      )
+                    bin_dir=params.execute_path)
     else:
       ExecuteHadoop(test_cmd,
                     user=params.hdfs_user,
@@ -86,6 +91,5 @@ def hcat_service_check():
             tries=3,
             user=params.smokeuser,
             try_sleep=5,
-            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', params.execute_path],
-            logoutput=True
-    )
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', exec_path],
+            logoutput=True)

+ 27 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py

@@ -40,7 +40,7 @@ class HiveMetastore(Script):
     import params
     env.set_params(params)
     self.configure(env)  # FOR SECURITY
-    hive_service('metastore', action='start')
+    hive_service('metastore', action='start', rolling_restart=rolling_restart)

   def stop(self, env, rolling_restart=False):
     import params
@@ -77,6 +77,9 @@ class HiveMetastoreDefault(HiveMetastore):
     import params
     env.set_params(params)

+    if Script.is_hdp_stack_greater_or_equal("2.3"):
+      self.upgrade_schema(env)
+
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       hdp_select.select("hive-metastore", params.version)
@@ -131,5 +134,28 @@ class HiveMetastoreDefault(HiveMetastore):
     else:
       self.put_structured_out({"securityState": "UNSECURED"})

+  def upgrade_schema(self, env):
+    """
+    Executes the schema upgrade binary.  This is its own function because it could
+    be called as a standalone task from the upgrade pack, but is safe to run it for each
+    metastore instance.
+    """
+    Logger.info("Upgrading Hive Metastore")
+    import params
+    env.set_params(params)
+
+    if params.security_enabled:
+      kinit_command=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ")
+      Execute(kinit_command,user=params.smokeuser)
+
+    binary = format("/usr/hdp/{version}/hive/bin/schematool")
+
+    env_dict = {
+      'HIVE_CONF_DIR': params.hive_server_conf_dir
+    }
+
+    command = format("{binary} -dbType {hive_metastore_db_type} -upgradeSchema")
+    Execute(command, user=params.hive_user, tries=1, environment=env_dict, logoutput=True)
+
 if __name__ == "__main__":
   HiveMetastore().execute()
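For a concrete sense of what upgrade_schema runs: with the values exercised by this patch's tests (version 2.3.0.0-1234, a MySQL-backed metastore), the Execute call resolves to roughly the following, executed as the hive user:

    export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server
    /usr/hdp/2.3.0.0-1234/hive/bin/schematool -dbType mysql -upgradeSchema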

+ 14 - 10
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py

@@ -24,6 +24,7 @@ from hive import hive
 from hive_service import hive_service
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions.get_hdp_version import get_hdp_version
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -72,8 +73,7 @@ class HiveServerDefault(HiveServer):
     self.configure(env) # FOR SECURITY

     setup_ranger_hive()
-    hive_service( 'hiveserver2', action = 'start',
-      rolling_restart=rolling_restart )
+    hive_service( 'hiveserver2', action = 'start', rolling_restart=rolling_restart)

   def stop(self, env, rolling_restart=False):
     import params
@@ -100,14 +100,18 @@ class HiveServerDefault(HiveServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       hdp_select.select("hive-server2", params.version)
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      params.HdfsResource(None, action="execute")
+      old = params.hdp_stack_version
+      try:
+        params.hdp_stack_version = get_hdp_version('hive-server2')
+        params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                            type="file",
+                            action="create_on_execute",
+                            source=params.mapreduce_tar_source,
+                            group=params.user_group,
+                            mode=params.tarballs_mode)
+        params.HdfsResource(None, action="execute")
+      finally:
+        params.hdp_stack_version = old

   def security_status(self, env):
     import status_params

+ 13 - 7
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py

@@ -57,7 +57,9 @@ def hive_service(name, action='start', rolling_restart=False):
     if name == 'hiveserver2':
       check_fs_root()

-    demon_cmd = cmd
+    daemon_cmd = cmd
+    hadoop_home = params.hadoop_home
+    hive_bin = "hive"

     # upgrading hiveserver2 (rolling_restart) means that there is an existing,
     # de-registering hiveserver2; the pid will still exist, but the new
@@ -65,16 +67,20 @@ def hive_service(name, action='start', rolling_restart=False):
     if rolling_restart:
       process_id_exists_command = None

+      if (params.version):
+        import os
+        hadoop_home = format("/usr/hdp/{version}/hadoop")
+        hive_bin = os.path.join(params.hive_bin, hive_bin)
+
     if params.security_enabled:
       hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; ")
       Execute(hive_kinit_cmd, user=params.hive_user)

-    Execute(demon_cmd, 
-      user=params.hive_user,
-      environment={'HADOOP_HOME': params.hadoop_home, 'JAVA_HOME': params.java64_home},
-      path=params.execute_path,
-      not_if=process_id_exists_command
-    )
+    Execute(daemon_cmd, 
+      user = params.hive_user,
+      environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_bin },
+      path = params.execute_path,
+      not_if = process_id_exists_command)

     if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or \
        params.hive_jdbc_driver == "org.postgresql.Driver" or \

+ 1 - 1
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py

@@ -39,7 +39,7 @@ class WebHCatServer(Script):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    webhcat_service(action='start')
+    webhcat_service(action='start', rolling_restart=rolling_restart)

   def stop(self, env, rolling_restart=False):
     import params

+ 17 - 10
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py

@@ -30,23 +30,30 @@ def webhcat_service(action='start'):


 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def webhcat_service(action='start'):
+def webhcat_service(action='start', rolling_restart=False):
   import params

-  cmd = format('env HADOOP_HOME={hadoop_home} {webhcat_bin_dir}/webhcat_server.sh')
+  environ = {
+    'HADOOP_HOME': params.hadoop_home
+  }
+
+  cmd = format('{webhcat_bin_dir}/webhcat_server.sh')

   if action == 'start':
-    demon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
+    if rolling_restart and params.version:
+      environ['HADOOP_HOME'] = format("/usr/hdp/{version}/hadoop")
+
+    daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
     no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
-    Execute(demon_cmd,
+    Execute(daemon_cmd,
             user=params.webhcat_user,
-            not_if=no_op_test
-    )
+            not_if=no_op_test,
+            environment = environ)
   elif action == 'stop':
-    demon_cmd = format('{cmd} stop')
-    Execute(demon_cmd,
-            user=params.webhcat_user
-    )
+    daemon_cmd = format('{cmd} stop')
+    Execute(daemon_cmd,
+            user = params.webhcat_user,
+            environment = environ)
     File(params.webhcat_pid_file,
          action="delete",
     )
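HADOOP_HOME thus moves from an inline env prefix on the command line into the Execute environment, and a rolling restart with a version set redirects it to /usr/hdp/{version}/hadoop. As the updated tests below pin it, a plain (non-upgrade) start now reduces to:

    cd /var/run/webhcat ; /usr/lib/hcatalog/sbin/webhcat_server.sh start    # with HADOOP_HOME=/usr in the environment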

+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml

@@ -524,6 +524,12 @@
       </component>

       <component name="WEBHCAT_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure">
+            <type>webhcat-env</type>
+            <replace key="content" find="export HADOOP_HOME={{hadoop_home}}" replace-with="export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}" />
+          </task>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart" />
         </upgrade>

+ 76 - 0
ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java

@@ -431,6 +431,82 @@ public class ConfigureActionTest {
     assertEquals("['c6401','c6402','c6403']", map.get("zoo.server.array"));
     assertEquals("['c6401','c6402','c6403']", map.get("zoo.server.array"));
   }
   }
 
 
+
+
+  @Test
+  public void testValueReplacement() throws Exception {
+    makeUpgradeCluster();
+
+    Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+    assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+
+    c.setDesiredStackVersion(HDP_21_STACK);
+    ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
+    Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+      {
+        put("key_to_replace", "My New Cat");
+        put("key_with_no_match", "WxyAndZ");
+      }
+    }, new HashMap<String, Map<String, String>>());
+    config.setTag("version2");
+    config.persist();
+
+    c.addConfig(config);
+    c.addDesiredConfig("user", Collections.singleton(config));
+    assertEquals(2, c.getConfigsByType("zoo.cfg").size());
+
+    Map<String, String> commandParams = new HashMap<String, String>();
+    commandParams.put("upgrade_direction", "upgrade");
+    commandParams.put("version", HDP_2_2_1_0);
+    commandParams.put("clusterName", "c1");
+    commandParams.put(ConfigureTask.PARAMETER_CONFIG_TYPE, "zoo.cfg");
+
+    // Replacement task
+    List<ConfigureTask.Replace> replacements = new ArrayList<ConfigureTask.Replace>();
+    ConfigureTask.Replace replace = new ConfigureTask.Replace();
+    replace.key = "key_to_replace";
+    replace.find = "New Cat";
+    replace.replaceWith = "Wet Dog";
+    replacements.add(replace);
+
+    replace = new ConfigureTask.Replace();
+    replace.key = "key_with_no_match";
+    replace.find = "abc";
+    replace.replaceWith = "def";
+    replacements.add(replace);
+
+    commandParams.put(ConfigureTask.PARAMETER_REPLACEMENTS, new Gson().toJson(replacements));
+
+    ExecutionCommand executionCommand = new ExecutionCommand();
+    executionCommand.setCommandParams(commandParams);
+    executionCommand.setClusterName("c1");
+    executionCommand.setRoleParams(new HashMap<String, String>());
+    executionCommand.getRoleParams().put(ServerAction.ACTION_USER_NAME, "username");
+
+    HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null, null, null);
+
+    hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(executionCommand));
+
+    ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+    action.setExecutionCommand(executionCommand);
+    action.setHostRoleCommand(hostRoleCommand);
+
+    CommandReport report = action.execute(null);
+    assertNotNull(report);
+
+    assertEquals(3, c.getConfigsByType("zoo.cfg").size());
+
+    config = c.getDesiredConfigByType("zoo.cfg");
+    assertNotNull(config);
+    assertFalse("version2".equals(config.getTag()));
+
+    assertEquals("My Wet Dog", config.getProperties().get("key_to_replace"));
+    assertEquals("WxyAndZ", config.getProperties().get("key_with_no_match"));
+
+  }
+
+
+
   private void makeUpgradeCluster() throws Exception {
     String clusterName = "c1";
     String hostName = "h1";

+ 148 - 2
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py

@@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
+import json
 import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
@@ -46,7 +47,7 @@ class TestHiveMetastore(RMFTestCase):

     self.assert_configure_default()
     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],
@@ -106,7 +107,7 @@ class TestHiveMetastore(RMFTestCase):
                               user = 'hive',
                               )
     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],
@@ -348,3 +349,148 @@ class TestHiveMetastore(RMFTestCase):
                               recursive = True,
                               cd_access = 'a',
                               )
+
+  @patch("resource_management.core.shell.call")
+  @patch("resource_management.libraries.functions.get_hdp_version")
+  def test_start_ru(self, get_hdp_version_mock, call_mock):
+    get_hdp_version_mock.return_value = '2.3.0.0-1234'
+
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    
+    version = "2.3.0.0-1234"
+    json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_name'] = "HDP"
+    json_content['hostLevelParams']['stack_version'] = "2.3"
+    json_content['role'] = "HIVE_SERVER"
+    json_content['configurations']['hive-site']['javax.jdo.option.ConnectionPassword'] = "aaa"
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_metastore.py",
+                       classname = "HiveMetastore",
+                       command = "start",
+                       command_args = [True],
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES)
+
+    self.assertResourceCalled('Directory', '/etc/hive',
+                              mode = 0755)
+
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/conf',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              recursive = True)
+
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              group = 'hadoop',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['mapred-site'])
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-default.xml.template',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              )
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-env.sh.template',
+                              owner = 'hive',
+                              group = 'hadoop')
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-exec-log4j.properties',
+      content = 'log4jproperties\nline2',
+      mode = 420,
+      group = 'hadoop',
+      owner = 'hive')
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/hive-log4j.properties',
+      content = 'log4jproperties\nline2',
+      mode = 420,
+      group = 'hadoop',
+      owner = 'hive')
+
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+                              group = 'hadoop',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                     u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                     u'javax.jdo.option.ConnectionPassword': u'true'}},
+                              owner = 'hive',
+                              configurations = self.getConfig()['configurations']['hive-site'])
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
+                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+                              owner = 'hive',
+                              group = 'hadoop')
+
+    self.assertResourceCalled('Directory', '/etc/security/limits.d',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True)
+
+    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
+                              content = Template('hive.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644)
+
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar'),
+                              path = ['/bin', '/usr/bin/'],
+                              sudo = True)
+
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
+        mode = 0644)
+
+    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
+        content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'))
+
+    self.assertResourceCalled('File', '/tmp/start_metastore_script',
+                              content = StaticFile('startMetastore.sh'),
+                              mode = 0755)
+
+    self.maxDiff = None
+
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa',
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord aaa'",
+        user = 'hive')
+
+    self.assertResourceCalled('Directory', '/var/run/hive',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              cd_access = 'a')
+
+    self.assertResourceCalled('Directory', '/var/log/hive',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              cd_access = 'a')
+
+    self.assertResourceCalled('Directory', '/var/lib/hive',
+                              owner = 'hive',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              cd_access = 'a')
+
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.3.0.0-1234/hadoop', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': '/usr/hdp/current/hive-server2/bin/hive'},
+        not_if = None,
+        user = 'hive',
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'])
+
+    self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
+        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+        tries = 5,
+        try_sleep = 10)
+
+    self.assertNoMoreResources()

+ 5 - 5
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -69,7 +69,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('Execute',
                               '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                               not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                               user='hive',
                               path=['/bin:/usr/lib/hive/bin:/usr/bin']
@@ -106,7 +106,7 @@ class TestHiveServer(RMFTestCase):
                               )
     self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
-                              environment = {'HADOOP_HOME' : '/usr', 'JAVA_HOME':'/usr/jdk64/jdk1.7.0_45'},
+                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                               path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
                               user = 'hive'
     )
     )
@@ -139,7 +139,7 @@ class TestHiveServer(RMFTestCase):
                               )
     self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
-                              environment = {'HADOOP_HOME' : '/usr', 'JAVA_HOME':'/usr/jdk64/jdk1.7.0_45'},
+                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                               path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
                               user = 'hive'
     )
     )
@@ -173,7 +173,7 @@ class TestHiveServer(RMFTestCase):
                               )
     self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
-                              environment = {'HADOOP_HOME' : '/usr', 'JAVA_HOME':'/usr/jdk64/jdk1.7.0_45'},
+                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                               path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               path = ["/bin:/usr/lib/hive/bin:/usr/bin"],
                               user = 'hive'
                               user = 'hive'
     )
     )
@@ -247,7 +247,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('Execute',
                               '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+                              environment={'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
                               not_if='ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                               user='hive',
                               path=['/bin:/usr/lib/hive/bin:/usr/bin'],

+ 10 - 6
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py

@@ -48,9 +48,10 @@ class TestWebHCatServer(RMFTestCase):
     )

     self.assert_configure_default()
-    self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
+    self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/lib/hcatalog/sbin/webhcat_server.sh start',
                               not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
-                              user = 'hcat'
+                              user = 'hcat',
+                              environment = {'HADOOP_HOME': '/usr' }
     )
     self.assertNoMoreResources()

@@ -63,8 +64,9 @@ class TestWebHCatServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
+    self.assertResourceCalled('Execute', '/usr/lib/hcatalog/sbin/webhcat_server.sh stop',
                               user = 'hcat',
+                              environment = {'HADOOP_HOME': '/usr' }
                               )
     self.assertResourceCalled('File', '/var/run/webhcat/webhcat.pid',
         action = ['delete'],
@@ -93,9 +95,10 @@ class TestWebHCatServer(RMFTestCase):
     )

     self.assert_configure_secured()
-    self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh start',
+    self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/lib/hcatalog/sbin/webhcat_server.sh start',
                               not_if = 'ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1',
-                              user = 'hcat'
+                              user = 'hcat',
+                              environment = {'HADOOP_HOME': '/usr' }
     )
     self.assertNoMoreResources()

@@ -108,8 +111,9 @@ class TestWebHCatServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

-    self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr /usr/lib/hcatalog/sbin/webhcat_server.sh stop',
+    self.assertResourceCalled('Execute', '/usr/lib/hcatalog/sbin/webhcat_server.sh stop',
                               user = 'hcat',
+                              environment = {'HADOOP_HOME': '/usr' }
                               )
     self.assertResourceCalled('File', '/var/run/webhcat/webhcat.pid',
         action = ['delete'],

+ 3 - 3
ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py

@@ -48,7 +48,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()

     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],
@@ -76,7 +76,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()

     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],
@@ -139,7 +139,7 @@ class TestHiveMetastore(RMFTestCase):
                               )

     self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+        environment = {'HADOOP_HOME': '/usr', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': 'hive'},
         not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
         user = 'hive',
         path = ['/bin:/usr/lib/hive/bin:/usr/bin'],

+ 4 - 4
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

@@ -69,8 +69,8 @@ class RMFTestCase(TestCase):
                     os_env={'PATH':'/bin'},
                     target=TARGET_STACKS,
                     mocks_dict={},
-                    try_install=False
-                    ):
+                    try_install=False,
+                    command_args=[]):
     norm_path = os.path.normpath(path)
     src_dir = RMFTestCase.get_src_folder()
     if target == self.TARGET_STACKS:
@@ -143,9 +143,9 @@ class RMFTestCase(TestCase):
                   with patch.object(os, "environ", new=os_env) as mocks_dict['environ']:
                   with patch.object(os, "environ", new=os_env) as mocks_dict['environ']:
                     if not try_install:
                     if not try_install:
                       with patch.object(Script, 'install_packages') as install_mock_value:
                       with patch.object(Script, 'install_packages') as install_mock_value:
-                        method(RMFTestCase.env)
+                        method(RMFTestCase.env, *command_args)
                     else:
-                      method(RMFTestCase.env)
+                      method(RMFTestCase.env, *command_args)

     sys.path.remove(scriptsdir)
     sys.path.remove(scriptsdir)