Browse Source

AMBARI-1647. Integrate server and agent changes for upgrade on cluster. (Sumit Mohanty via swagle)

git-svn-id: https://svn.apache.org/repos/asf/incubator/ambari/trunk@1457194 13f79535-47bb-0310-9956-ffa450edef68
Siddharth Wagle 12 years ago
parent
commit
5fc6ba4959
50 changed files with 1216 additions and 127 deletions
  1. 3 0
      CHANGES.txt
  2. 2 2
      ambari-agent/pom.xml
  3. 3 2
      ambari-agent/src/main/python/ambari_agent/ActionQueue.py
  4. 1 1
      ambari-agent/src/main/python/ambari_agent/PuppetExecutor.py
  5. 75 6
      ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
  6. 1 1
      ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py
  7. 21 12
      ambari-agent/src/main/python/ambari_agent/UpgradeExecutor.py
  8. 1 1
      ambari-agent/src/main/upgrade_scripts/stub
  9. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/DATANODE/upgrade.d/50-dummy.pp
  10. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/GANGLIA_MONITOR/stub.txt
  11. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/GANGLIA_SERVER/stub.txt
  12. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HADOOP_CLIENT/stub.txt
  13. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HBASE_CLIENT/stub.txt
  14. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HBASE_MASTER/stub.txt
  15. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HBASE_REGIONSERVER/stub.txt
  16. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HCAT/stub.txt
  17. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HDFS_CLIENT/upgrade.d/50-dummy.pp
  18. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HIVE_CLIENT/stub.txt
  19. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HIVE_METASTORE/stub.txt
  20. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HIVE_SERVER/stub.txt
  21. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HUE_SERVER/stub.txt
  22. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/JOBTRACKER/upgrade.d/50-dummy.pp
  23. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/MAPREDUCE_CLIENT/upgrade.d/50-dummy.pp
  24. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/MONITOR_WEBSERVER/stub.txt
  25. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/MYSQL_SERVER/stub.txt
  26. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/NAGIOS_SERVER/stub.txt
  27. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/NAMENODE/upgrade.d/50-dummy.pp
  28. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/OOZIE_CLIENT/stub.txt
  29. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/OOZIE_SERVER/stub.txt
  30. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/PIG/stub.txt
  31. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/SECONDARY_NAMENODE/upgrade.d/50-dummy.pp
  32. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/SQOOP/stub.txt
  33. 22 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/TASKTRACKER/upgrade.d/50-dummy.pp
  34. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/WEBHCAT_SERVER/stub.txt
  35. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/ZOOKEEPER_CLIENT/stub.txt
  36. 19 0
      ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/ZOOKEEPER_SERVER/stub.txt
  37. 1 0
      ambari-agent/src/test/python/TestActionQueue.py
  38. 1 1
      ambari-agent/src/test/python/TestLiveStatus.py
  39. 0 1
      ambari-agent/src/test/python/TestPuppetExecutor.py
  40. 145 0
      ambari-agent/src/test/python/TestPythonExecutor.py
  41. 8 6
      ambari-agent/src/test/python/TestStackVersionsFileHandler.py
  42. 16 16
      ambari-agent/src/test/python/TestUpgradeExecutor.py
  43. 4 4
      ambari-agent/src/test/python/dummy_files/dummy_current_stack
  44. 222 0
      ambari-agent/src/test/python/examples/ControllerTester.py
  45. 45 44
      ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
  46. 4 4
      ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
  47. 75 16
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  48. 8 5
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
  49. 4 4
      ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
  50. 23 1
      ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java

+ 3 - 0
CHANGES.txt

@@ -12,6 +12,9 @@ Trunk (unreleased changes):
 
 
  NEW FEATURES
  NEW FEATURES
 
 
+ AMBARI-1647. Integrate server and agent changes for upgrade on cluster. 
+ (Sumit Mohanty via swagle)
+
  AMBARI-1626. API support to upgrade host component. (Sumit Mohanty via swagle)
  AMBARI-1626. API support to upgrade host component. (Sumit Mohanty via swagle)
 
 
  AMBARI-1601. Server level action support. (Sumit Mohanty via swagle)
  AMBARI-1601. Server level action support. (Sumit Mohanty via swagle)

+ 2 - 2
ambari-agent/pom.xml

@@ -204,13 +204,13 @@
               </sources>
               </sources>
             </mapping>
             </mapping>
             <mapping>
             <mapping>
-              <directory>/var/lib/${project.artifactId}/upgrade_scripts</directory>
+              <directory>/var/lib/${project.artifactId}/upgrade_stack</directory>
               <filemode>755</filemode>
               <filemode>755</filemode>
               <username>root</username>
               <username>root</username>
               <groupname>root</groupname>
               <groupname>root</groupname>
               <sources>
               <sources>
                 <source>
                 <source>
-                  <location>src/main/upgrade_scripts</location>
+                  <location>src/main/upgrade_stack</location>
                 </source>
                 </source>
               </sources>
               </sources>
             </mapping>
             </mapping>

+ 3 - 2
ambari-agent/src/main/python/ambari_agent/ActionQueue.py

@@ -67,9 +67,10 @@ class ActionQueue(threading.Thread):
                                    config.get('puppet', 'puppet_home'),
                                    config.get('puppet', 'puppet_home'),
                                    config.get('puppet', 'facter_home'),
                                    config.get('puppet', 'facter_home'),
                                    config.get('agent', 'prefix'), config)
                                    config.get('agent', 'prefix'), config)
-    self.pythonExecutor = PythonExecutor.PythonExecutor()
+    self.pythonExecutor = PythonExecutor.PythonExecutor(
+                                   config.get('agent', 'prefix'), config)
     self.upgradeExecutor = UpgradeExecutor.UpgradeExecutor(self.pythonExecutor,
     self.upgradeExecutor = UpgradeExecutor.UpgradeExecutor(self.pythonExecutor,
-                                   PuppetExecutor, config)
+                                   self.puppetExecutor, config)
     self.tmpdir = config.get('agent', 'prefix')
     self.tmpdir = config.get('agent', 'prefix')
     self.commandInProgress = None
     self.commandInProgress = None
 
 

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/PuppetExecutor.py

@@ -115,7 +115,7 @@ class PuppetExecutor:
     if command.has_key("taskId"):
     if command.has_key("taskId"):
       taskId = command['taskId']
       taskId = command['taskId']
     #Install repos
     #Install repos
-    self.deployRepos(command, self.tmpDir, self.modulesdir, command.taskId)
+    self.deployRepos(command, self.tmpDir, self.modulesdir, taskId)
     puppetEnv = os.environ
     puppetEnv = os.environ
     self.runPuppetFile(file, result, puppetEnv, tmpout, tmperr)
     self.runPuppetFile(file, result, puppetEnv, tmpout, tmperr)
     if self.isSuccessfull(result["exitcode"]):
     if self.isSuccessfull(result["exitcode"]):

+ 75 - 6
ambari-agent/src/main/python/ambari_agent/PythonExecutor.py

@@ -30,14 +30,83 @@ logger = logging.getLogger()
 
 
 class PythonExecutor:
 class PythonExecutor:
 
 
-  def __init__(self):
+  # How many seconds will pass before running puppet is terminated on timeout
+  PYTHON_TIMEOUT_SECONDS = 600
+
+  NO_ERROR = "none"
+  grep = Grep()
+  event = threading.Event()
+  python_process_has_been_killed = False
+
+  def __init__(self, tmpDir, config):
+    self.tmpDir = tmpDir
+    self.config = config
     pass
     pass
 
 
-  def run_file(self, name, stdout, stderr):
+  def run_file(self, command, file, tmpoutfile, tmperrfile):
+    """
+    Executes the specified python file in a separate subprocess.
+    Method returns only when the subprocess is finished.
+    """
+    tmpout =  open(tmpoutfile, 'w')
+    tmperr =  open(tmperrfile, 'w')
+    pythonCommand = self.pythonCommand(file)
+    logger.info("Running command " + pprint.pformat(pythonCommand))
+    process = self.lauch_python_subprocess(pythonCommand, tmpout, tmperr)
+    logger.info("Launching watchdog thread")
+    self.event.clear()
+    self.python_process_has_been_killed = False
+    thread = Thread(target =  self.python_watchdog_func, args = (process, ))
+    thread.start()
+    # Waiting for process to finished or killed
+    process.communicate()
+    self.event.set()
+    thread.join()
+    # Building results
+    error = self.NO_ERROR
+    returncode = process.returncode
+    out = open(tmpoutfile, 'r').read()
+    error = open(tmperrfile, 'r').read()
+    if self.python_process_has_been_killed:
+      error = str(error) + "\n Puppet has been killed due to timeout"
+      returncode = 999
+    result = self.condenseOutput(out, error, returncode)
+    logger.info("Result: %s" % result)
+    return result
+
+
+  def lauch_python_subprocess(self, command, tmpout, tmperr):
     """
     """
-    Executes the file specified in a separate subprocess.
-    Method returns only when the subprocess is finished or timeout is exceeded
+    Creates subprocess with given parameters. This functionality was moved to separate method
+    to make possible unit testing
     """
     """
-    # TODO: implement
-    logger.warn("TODO: Python file execution is not supported yet")
+    return subprocess.Popen(command,
+      stdout=tmpout,
+      stderr=tmperr)
+
+  def isSuccessfull(self, returncode):
+    return not self.python_process_has_been_killed and returncode == 0
+
+  def pythonCommand(self, file):
+    puppetcommand = ['python', file]
+    return puppetcommand
+
+  def condenseOutput(self, stdout, stderr, retcode):
+    grep = self.grep
+    result = {
+      "exitcode": retcode,
+      "stdout"  : grep.tail(stdout, grep.OUTPUT_LAST_LINES),
+      "stderr"  : grep.tail(stderr, grep.OUTPUT_LAST_LINES)
+    }
+    return result
+
+  def python_watchdog_func(self, python):
+    self.event.wait(self.PYTHON_TIMEOUT_SECONDS)
+    if python.returncode is None:
+      logger.error("Subprocess timed out and will be killed")
+      self.runShellKillPgrp(python)
+      self.python_process_has_been_killed = True
     pass
     pass
+
+  def runShellKillPgrp(self, python):
+    shell.killprocessgrp(python.pid)

+ 1 - 1
ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py

@@ -80,7 +80,7 @@ class StackVersionsFileHandler:
       logger.info("Writing new stack versions file")
       logger.info("Writing new stack versions file")
       with open (self.versionsFilePath, 'w') as f:
       with open (self.versionsFilePath, 'w') as f:
         for key in values:
         for key in values:
-          f.write ("%s\t%s\n" % (key, values))
+          f.write ("%s\t%s\n" % (key, values[key]))
 
 
     except Exception, err:
     except Exception, err:
       logger.error("Can't write new stack version (%s %s) :%s " % (component,
       logger.error("Can't write new stack version (%s %s) :%s " % (component,

+ 21 - 12
ambari-agent/src/main/python/ambari_agent/UpgradeExecutor.py

@@ -30,7 +30,7 @@ import shell
 import traceback
 import traceback
 from Grep import Grep
 from Grep import Grep
 from StackVersionsFileHandler import StackVersionsFileHandler
 from StackVersionsFileHandler import StackVersionsFileHandler
-import re
+import re, json
 
 
 logger = logging.getLogger()
 logger = logging.getLogger()
 grep = Grep()
 grep = Grep()
@@ -61,10 +61,10 @@ class UpgradeExecutor:
     params = command['commandParams']
     params = command['commandParams']
     srcStack = params['source_stack_version']
     srcStack = params['source_stack_version']
     tgtStack = params['target_stack_version']
     tgtStack = params['target_stack_version']
-    component = command['component']
+    component = command['role']
 
 
     srcStackTuple = self.split_stack_version(srcStack)
     srcStackTuple = self.split_stack_version(srcStack)
-    tgtStackTuple = self.split_stack_version(srcStack)
+    tgtStackTuple = self.split_stack_version(tgtStack)
 
 
     if srcStackTuple is None or tgtStackTuple is None:
     if srcStackTuple is None or tgtStackTuple is None:
       errorstr = "Source (%s) or target (%s) version does not match pattern \
       errorstr = "Source (%s) or target (%s) version does not match pattern \
@@ -81,7 +81,8 @@ class UpgradeExecutor:
       # Check stack version (do we need upgrade?)
       # Check stack version (do we need upgrade?)
       basedir = os.path.join(self.stacksDir, upgradeId, component)
       basedir = os.path.join(self.stacksDir, upgradeId, component)
       if not os.path.isdir(basedir):
       if not os.path.isdir(basedir):
-        errorstr = "Upgrade %s is not supported" % upgradeId
+        errorstr = "Upgrade %s is not supported (dir %s does not exist)" \
+                   % (upgradeId, basedir)
         logger.error(errorstr)
         logger.error(errorstr)
         result = {
         result = {
           'exitcode' : 1,
           'exitcode' : 1,
@@ -138,10 +139,12 @@ class UpgradeExecutor:
 
 
 
 
   def split_stack_version(self, verstr):
   def split_stack_version(self, verstr):
-    matchObj = re.match( r'^(.*)-(\d+).(\d+)', verstr.strip(), re.M|re.I)
-    stack_name = matchObj.group(1)
-    stack_major_ver = matchObj.group(2)
-    stack_minor_ver = matchObj.group(3)
+    verdict = json.loads(verstr)
+    stack_name = verdict["stackName"].strip()
+
+    matchObj = re.match( r'(\d+).(\d+)', verdict["stackVersion"].strip(), re.M|re.I)
+    stack_major_ver = matchObj.group(1)
+    stack_minor_ver = matchObj.group(2)
     if matchObj:
     if matchObj:
       return stack_name, stack_major_ver, stack_minor_ver
       return stack_name, stack_major_ver, stack_minor_ver
     else:
     else:
@@ -156,8 +159,14 @@ class UpgradeExecutor:
     dirpath = os.path.join(basedir, dir)
     dirpath = os.path.join(basedir, dir)
     logger.info("Executing %s" % dirpath)
     logger.info("Executing %s" % dirpath)
     if not os.path.isdir(dirpath):
     if not os.path.isdir(dirpath):
-      logger.warn("Script directory %s does not exist, skipping")
-      return
+      warnstr = "Script directory %s does not exist, skipping" % dirpath
+      logger.warn(warnstr)
+      result = {
+        'exitcode' : 0,
+        'stdout'   : warnstr,
+        'stderr'   : 'None'
+      }
+      return result
     fileList=os.listdir(dirpath)
     fileList=os.listdir(dirpath)
     fileList.sort(key = self.get_key_func)
     fileList.sort(key = self.get_key_func)
     formattedResult = {
     formattedResult = {
@@ -172,11 +181,11 @@ class UpgradeExecutor:
       filepath = os.path.join(dirpath, filename)
       filepath = os.path.join(dirpath, filename)
       if filename.endswith(".pp"):
       if filename.endswith(".pp"):
         logger.info("Running puppet file %s" % filepath)
         logger.info("Running puppet file %s" % filepath)
-        result = self.puppetExecutor.just_run_one_file(command, filename,
+        result = self.puppetExecutor.just_run_one_file(command, filepath,
                                                                 tmpout, tmperr)
                                                                 tmpout, tmperr)
       elif filename.endswith(".py"):
       elif filename.endswith(".py"):
         logger.info("Running python file %s" % filepath)
         logger.info("Running python file %s" % filepath)
-        result = self.pythonExecutor.run_file(filepath, tmpout, tmperr)
+        result = self.pythonExecutor.run_file(command, filepath, tmpout, tmperr)
       elif filename.endswith(".pyc"):
       elif filename.endswith(".pyc"):
         pass # skipping compiled files
         pass # skipping compiled files
       else:
       else:

+ 1 - 1
ambari-agent/src/main/upgrade_scripts/stub

@@ -18,4 +18,4 @@ See the License for the specific language governing permissions and
 limitations under the License.
 limitations under the License.
 '''
 '''
 
 
-Will be replaced with a real folder
+#Will be replaced with a real folder

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/DATANODE/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/GANGLIA_MONITOR/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/GANGLIA_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HADOOP_CLIENT/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HBASE_CLIENT/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HBASE_MASTER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HBASE_REGIONSERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HCAT/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HDFS_CLIENT/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HIVE_CLIENT/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HIVE_METASTORE/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HIVE_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/HUE_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/JOBTRACKER/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/MAPREDUCE_CLIENT/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/MONITOR_WEBSERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/MYSQL_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/NAGIOS_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/NAMENODE/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/OOZIE_CLIENT/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/OOZIE_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/PIG/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/SECONDARY_NAMENODE/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/SQOOP/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 22 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/TASKTRACKER/upgrade.d/50-dummy.pp

@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+notify {"Dummy notification":}

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/WEBHCAT_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/ZOOKEEPER_CLIENT/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 19 - 0
ambari-agent/src/main/upgrade_stack/HDP-1.2_HDP-1.3/ZOOKEEPER_SERVER/stub.txt

@@ -0,0 +1,19 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Replace the folder content with actual files required for upgrade.

+ 1 - 0
ambari-agent/src/test/python/TestActionQueue.py

@@ -203,6 +203,7 @@ class TestActionQueue(TestCase):
     queue.put(statusCommand)
     queue.put(statusCommand)
     queue.run()
     queue.run()
     returned_result = queue.resultQueue.get()
     returned_result = queue.resultQueue.get()
+    returned_result[1]['status'] = 'INSTALLED' # Patch live value
     self.assertEquals(returned_result, ('STATUS_COMMAND',
     self.assertEquals(returned_result, ('STATUS_COMMAND',
                                         {'clusterName': '',
                                         {'clusterName': '',
                                          'componentName': 'DATANODE',
                                          'componentName': 'DATANODE',

+ 1 - 1
ambari-agent/src/test/python/TestLiveStatus.py

@@ -35,6 +35,6 @@ class TestLiveStatus(TestCase):
       print "LiveStatus of {0}: {1}".format(component['serviceName'], str(result))
       print "LiveStatus of {0}: {1}".format(component['serviceName'], str(result))
       self.assertEquals(len(result) > 0, True, 'Livestatus should not be empty')
       self.assertEquals(len(result) > 0, True, 'Livestatus should not be empty')
       if component['componentName'] == 'GANGLIA_SERVER':
       if component['componentName'] == 'GANGLIA_SERVER':
-        self.assertEquals(result['stackVersion'],'HDP-1.2.2',
+        self.assertEquals(result['stackVersion'],'{"stackName":"HDP","stackVersion":"1.2.2"}',
                       'Livestatus should contain component stack version')
                       'Livestatus should contain component stack version')
   
   

+ 0 - 1
ambari-agent/src/test/python/TestPuppetExecutor.py

@@ -128,7 +128,6 @@ class TestPuppetExecutor(TestCase):
     time.sleep(0.1)
     time.sleep(0.1)
     subproc_mock.should_finish_event.set()
     subproc_mock.should_finish_event.set()
     subproc_mock.finished_event.wait()
     subproc_mock.finished_event.wait()
-    print(subproc_mock.was_terminated)
     self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
     self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
     self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
     self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
 
 

+ 145 - 0
ambari-agent/src/test/python/TestPythonExecutor.py

@@ -0,0 +1,145 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import pprint
+
+from unittest import TestCase
+import threading
+import tempfile
+import time
+from threading import Thread
+
+from PythonExecutor import PythonExecutor
+from AmbariConfig import AmbariConfig
+from mock.mock import MagicMock
+
+
+class TestPythonExecutor(TestCase):
+
+
+  def test_watchdog_1(self):
+    """
+    Tests whether watchdog works
+    """
+    subproc_mock = self.Subprocess_mockup()
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    executor.PYTHON_TIMEOUT_SECONDS = 0.1
+
+    def lauch_python_subprocess_method(command, tmpout, tmperr):
+      subproc_mock.tmpout = tmpout
+      subproc_mock.tmperr = tmperr
+      return subproc_mock
+    executor.lauch_python_subprocess = lauch_python_subprocess_method
+    runShellKillPgrp_method = MagicMock()
+    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
+    executor.runShellKillPgrp = runShellKillPgrp_method
+    subproc_mock.returncode = None
+    thread = Thread(target =  executor.run_file, args = ("fake_command",
+                                    "fake_puppetFile", tmpoutfile, tmperrfile))
+    thread.start()
+    time.sleep(0.1)
+    subproc_mock.finished_event.wait()
+    self.assertEquals(subproc_mock.was_terminated, True, "Subprocess should be terminated due to timeout")
+
+
+  def test_watchdog_2(self):
+    """
+    Tries to catch false positive watchdog invocations
+    """
+    subproc_mock = self.Subprocess_mockup()
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    executor.PYTHON_TIMEOUT_SECONDS =  5
+
+    def lauch_python_subprocess_method(command, tmpout, tmperr):
+      subproc_mock.tmpout = tmpout
+      subproc_mock.tmperr = tmperr
+      return subproc_mock
+    executor.lauch_python_subprocess = lauch_python_subprocess_method
+    runShellKillPgrp_method = MagicMock()
+    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
+    executor.runShellKillPgrp = runShellKillPgrp_method
+    subproc_mock.returncode = 0
+    thread = Thread(target =  executor.run_file, args = ("fake_command",
+                                                         "fake_puppetFile", tmpoutfile, tmperrfile))
+    thread.start()
+    time.sleep(0.1)
+    subproc_mock.should_finish_event.set()
+    subproc_mock.finished_event.wait()
+    self.assertEquals(subproc_mock.was_terminated, False, "Subprocess should not be terminated before timeout")
+    self.assertEquals(subproc_mock.returncode, 0, "Subprocess should not be terminated before timeout")
+
+
+  def test_execution_results(self):
+    subproc_mock = self.Subprocess_mockup()
+    executor = PythonExecutor("/tmp", AmbariConfig().getConfig())
+    _, tmpoutfile = tempfile.mkstemp()
+    _, tmperrfile = tempfile.mkstemp()
+    executor.PYTHON_TIMEOUT_SECONDS =  5
+
+    def lauch_python_subprocess_method(command, tmpout, tmperr):
+      subproc_mock.tmpout = tmpout
+      subproc_mock.tmperr = tmperr
+      return subproc_mock
+    executor.lauch_python_subprocess = lauch_python_subprocess_method
+    runShellKillPgrp_method = MagicMock()
+    runShellKillPgrp_method.side_effect = lambda python : python.terminate()
+    executor.runShellKillPgrp = runShellKillPgrp_method
+    subproc_mock.returncode = 0
+    subproc_mock.should_finish_event.set()
+    result = executor.run_file("command", "file", tmpoutfile, tmperrfile)
+    self.assertEquals(result, {'exitcode': 0, 'stderr': 'Dummy err', 'stdout': 'Dummy output'})
+
+
+  class Subprocess_mockup():
+    """
+    It's not trivial to use PyMock instead of class here because we need state
+    and complex logics
+    """
+
+    returncode = 0
+
+    started_event = threading.Event()
+    should_finish_event = threading.Event()
+    finished_event = threading.Event()
+    was_terminated = False
+    tmpout = None
+    tmperr = None
+    pid=-1
+
+    def communicate(self):
+      self.started_event.set()
+      self.tmpout.write("Dummy output")
+      self.tmpout.flush()
+
+      self.tmperr.write("Dummy err")
+      self.tmperr.flush()
+      self.should_finish_event.wait()
+      self.finished_event.set()
+      pass
+
+    def terminate(self):
+      self.was_terminated = True
+      self.returncode = 17
+      self.should_finish_event.set()
+

+ 8 - 6
ambari-agent/src/test/python/TestStackVersionsFileHandler.py

@@ -42,9 +42,9 @@ class TestStackVersionsFileHandler(TestCase):
   def test_read_stack_version(self, touch_method):
   def test_read_stack_version(self, touch_method):
     stackVersionsFileHandler.versionsFilePath = dummyVersionsFile
     stackVersionsFileHandler.versionsFilePath = dummyVersionsFile
     result = stackVersionsFileHandler.read_stack_version("NAGIOS_SERVER")
     result = stackVersionsFileHandler.read_stack_version("NAGIOS_SERVER")
-    self.assertEquals(result, "HDP-1.2.1")
+    self.assertEquals(result, '{"stackName":"HDP","stackVersion":"1.2.1"}')
     result = stackVersionsFileHandler.read_stack_version("GANGLIA_SERVER")
     result = stackVersionsFileHandler.read_stack_version("GANGLIA_SERVER")
-    self.assertEquals(result, "HDP-1.2.2")
+    self.assertEquals(result, '{"stackName":"HDP","stackVersion":"1.2.2"}')
     result = stackVersionsFileHandler.read_stack_version("NOTEXISTING")
     result = stackVersionsFileHandler.read_stack_version("NOTEXISTING")
     self.assertEquals(result, stackVersionsFileHandler.DEFAULT_VER)
     self.assertEquals(result, stackVersionsFileHandler.DEFAULT_VER)
     self.assertTrue(touch_method.called)
     self.assertTrue(touch_method.called)
@@ -55,16 +55,18 @@ class TestStackVersionsFileHandler(TestCase):
     stackVersionsFileHandler.versionsFilePath = dummyVersionsFile
     stackVersionsFileHandler.versionsFilePath = dummyVersionsFile
     result = stackVersionsFileHandler.read_all_stack_versions()
     result = stackVersionsFileHandler.read_all_stack_versions()
     self.assertEquals(len(result.keys()), 4)
     self.assertEquals(len(result.keys()), 4)
-    self.assertEquals(result["NAGIOS_SERVER"], "HDP-1.2.1")
-    self.assertEquals(result["HCATALOG"], "HDP-1.3.0")
+    self.assertEquals(result["NAGIOS_SERVER"],
+          '{"stackName":"HDP","stackVersion":"1.2.1"}')
+    self.assertEquals(result["HCATALOG"],
+          '{"stackName":"HDP","stackVersion":"1.2.2"}')
     self.assertTrue(touch_method.called)
     self.assertTrue(touch_method.called)
 
 
 
 
   def test_extract(self):
   def test_extract(self):
-    s = "   NAGIOS_SERVER	\t  HDP-1.3.0  "
+    s = '   NAGIOS_SERVER	\t  {"stackName":"HDP","stackVersion":"1.3.0"}  '
     comp, ver = stackVersionsFileHandler.extract(s)
     comp, ver = stackVersionsFileHandler.extract(s)
     self.assertEqual(comp, "NAGIOS_SERVER")
     self.assertEqual(comp, "NAGIOS_SERVER")
-    self.assertEqual(ver, "HDP-1.3.0")
+    self.assertEqual(ver, '{"stackName":"HDP","stackVersion":"1.3.0"}')
     # testing wrong value
     # testing wrong value
     s = "   NAGIOS_SERVER	"
     s = "   NAGIOS_SERVER	"
     comp, ver = stackVersionsFileHandler.extract(s)
     comp, ver = stackVersionsFileHandler.extract(s)

+ 16 - 16
ambari-agent/src/test/python/TestUpgradeExecutor.py

@@ -22,7 +22,7 @@ from unittest import TestCase
 import unittest
 import unittest
 import StringIO
 import StringIO
 import socket
 import socket
-import os, sys, pprint
+import os, sys, pprint, json
 from mock.mock import patch
 from mock.mock import patch
 from mock.mock import MagicMock
 from mock.mock import MagicMock
 from mock.mock import create_autospec
 from mock.mock import create_autospec
@@ -45,10 +45,10 @@ class TestUpgradeExecutor(TestCase):
     # Checking matching versions
     # Checking matching versions
     command = {
     command = {
       'commandParams' :	{
       'commandParams' :	{
-        'source_stack_version' : 'HDP-1.3.0',
-        'target_stack_version' : 'HDP-1.3.0',
+        'source_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}',
+        'target_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}',
        },
        },
-      'component' : 'HDFS'
+      'role' : 'HDFS'
     }
     }
     result = executor.perform_stack_upgrade(command, 'tmpout', 'tmperr')
     result = executor.perform_stack_upgrade(command, 'tmpout', 'tmperr')
     self.assertTrue('matches current stack version' in result['stdout'])
     self.assertTrue('matches current stack version' in result['stdout'])
@@ -57,10 +57,10 @@ class TestUpgradeExecutor(TestCase):
     write_stack_version_method.reset()
     write_stack_version_method.reset()
     command = {
     command = {
       'commandParams' :	{
       'commandParams' :	{
-        'source_stack_version' : 'HDP-1.0.1',
-        'target_stack_version' : 'HDP-1.3.0',
+        'source_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.0.1\"}',
+        'target_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}',
       },
       },
-      'component' : 'HDFS'
+      'role' : 'HDFS'
     }
     }
     isdir_method.return_value = False
     isdir_method.return_value = False
     result = executor.perform_stack_upgrade(command, 'tmpout', 'tmperr')
     result = executor.perform_stack_upgrade(command, 'tmpout', 'tmperr')
@@ -70,10 +70,10 @@ class TestUpgradeExecutor(TestCase):
     write_stack_version_method.reset()
     write_stack_version_method.reset()
     command = {
     command = {
       'commandParams' :	{
       'commandParams' :	{
-        'source_stack_version' : 'HDP-1.0.1',
-        'target_stack_version' : 'HDP-1.3.0',
+        'source_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.0.1\"}',
+        'target_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}',
       },
       },
-      'component' : 'HDFS'
+      'role' : 'HDFS'
     }
     }
     isdir_method.return_value = True
     isdir_method.return_value = True
     executor.execute_dir = lambda command, basedir, dir, tmpout, tmperr : \
     executor.execute_dir = lambda command, basedir, dir, tmpout, tmperr : \
@@ -91,10 +91,10 @@ class TestUpgradeExecutor(TestCase):
     write_stack_version_method.reset()
     write_stack_version_method.reset()
     command = {
     command = {
       'commandParams' :	{
       'commandParams' :	{
-        'source_stack_version' : 'HDP-1.0.1',
-        'target_stack_version' : 'HDP-1.3.0',
+        'source_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.0.1\"}',
+        'target_stack_version' : '{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}',
       },
       },
-      'component' : 'HDFS'
+      'role' : 'HDFS'
     }
     }
     isdir_method.return_value = True
     isdir_method.return_value = True
     executor.execute_dir = lambda command, basedir, dir, tmpout, tmperr :\
     executor.execute_dir = lambda command, basedir, dir, tmpout, tmperr :\
@@ -131,11 +131,11 @@ class TestUpgradeExecutor(TestCase):
   def test_split_stack_version(self):
   def test_split_stack_version(self):
     executor = UpgradeExecutor.UpgradeExecutor('pythonExecutor',
     executor = UpgradeExecutor.UpgradeExecutor('pythonExecutor',
              'puppetExecutor', AmbariConfig.AmbariConfig().getConfig())
              'puppetExecutor', AmbariConfig.AmbariConfig().getConfig())
-    result = executor.split_stack_version("HDP-1.2.1")
+    result = executor.split_stack_version('{\"stackName\":\"HDP\",\"stackVersion\":\"1.2.1\"}')
     self.assertEquals(result, ('HDP', '1', '2'))
     self.assertEquals(result, ('HDP', '1', '2'))
-    result = executor.split_stack_version("HDP-1.3")
+    result = executor.split_stack_version('{\"stackName\":\"HDP\",\"stackVersion\":\"1.3\"}')
     self.assertEquals(result, ('HDP', '1', '3'))
     self.assertEquals(result, ('HDP', '1', '3'))
-    result = executor.split_stack_version("ComplexStackVersion-1.3.4.2.2")
+    result = executor.split_stack_version('{\"stackName\":\"ComplexStackVersion\",\"stackVersion\":\"1.3.4.2.2\"}')
     self.assertEquals(result, ('ComplexStackVersion', '1', '3'))
     self.assertEquals(result, ('ComplexStackVersion', '1', '3'))
     pass
     pass
 
 

+ 4 - 4
ambari-agent/src/test/python/dummy_files/dummy_current_stack

@@ -1,4 +1,4 @@
-DATANODE    HDP-1.2.0
-NAGIOS_SERVER    HDP-1.2.1
-HCATALOG    HDP-1.3.0
-GANGLIA_SERVER  HDP-1.2.2
+DATANODE    {"stackName":"HDP","stackVersion":"1.2.0"}
+NAGIOS_SERVER   {"stackName":"HDP","stackVersion":"1.2.1"}
+HCATALOG    {"stackName":"HDP","stackVersion":"1.2.2"}
+GANGLIA_SERVER  {"stackName":"HDP","stackVersion":"1.2.2"}

+ 222 - 0
ambari-agent/src/test/python/examples/ControllerTester.py

@@ -0,0 +1,222 @@
+from ambari_agent import Controller
+import pprint, json, os, time, sys
+import tempfile
+from urllib2 import Request, urlopen, URLError
+from mock.mock import patch, MagicMock, call
+from ambari_agent.AmbariConfig  import AmbariConfig
+import Queue
+import logging
+from ambari_agent import PuppetExecutor, PythonExecutor
+
+logger=logging.getLogger()
+
+queue = Queue.Queue()
+
+# Set to True to replace python and puppet calls with mockups
+disable_python_and_puppet = False
+
+# Values from the list below are returned in response to agent requests (one per
+# request). When every value has been returned, the last element of list is
+# returned on every subsequent request.
+responces = [
+  """{"responseId":"n",
+  "response":"OK"}""",
+
+  """
+  {
+    "responseId":"n",
+    "restartAgent": "False",
+    "executionCommands":
+      [{
+        "commandId": "31-1",
+        "role" : "DATANODE",
+        "taskId" : 2,
+        "clusterName" : "clusterName",
+        "serviceName" : "HDFS",
+        "roleCommand" : "UPGRADE",
+        "hostname" : "localhost.localdomain",
+        "hostLevelParams": {},
+        "clusterHostInfo": "clusterHostInfo",
+        "configurations": {},
+        "commandType": "EXECUTION_COMMAND",
+        "configurations": {"global" : {}},
+        "roleParams": {},
+        "commandParams" :	{
+          "source_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.2.2\\"}",
+          "target_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.3.0\\"}"
+        },
+        "clusterHostInfo": {
+          "ambari_db_server_host": [
+              "dev.hortonworks.com"
+          ],
+          "ganglia_server_host": [
+              "dev.hortonworks.com"
+          ],
+          "nagios_server_host": [
+              "dev.hortonworks.com"
+          ],
+          "namenode_host": [
+              "dev.hortonworks.com"
+          ],
+          "slave_hosts": [
+              "dev.hortonworks.com"
+          ]
+        }
+      }],
+    "statusCommands":[]
+  }
+  """,
+
+  """
+  {
+    "responseId":"n",
+    "restartAgent": "False",
+    "executionCommands": [],
+    "statusCommands":[]
+  }
+  """
+]
+
+class Int(object):
+  def __init__(self, value):
+    self.value = value
+
+  def inc(self):
+    self.value += 1
+
+  def val(self):
+    return self.value
+
+responseId = Int(0)
+
+def main():
+
+  if disable_python_and_puppet:
+    with patch.object(PuppetExecutor.PuppetExecutor, 'just_run_one_file') \
+                                          as just_run_one_file_pp_method:
+      just_run_one_file_pp_method.side_effect = \
+              lambda command, file, tmpout, tmperr: {
+          'exitcode' : 0,
+          'stdout'   : "Simulated run of pp %s" % file,
+          'stderr'   : 'None'
+        }
+      with patch.object(PythonExecutor.PythonExecutor, 'run_file') \
+                                          as run_file_py_method:
+        run_file_py_method.side_effect = \
+              lambda command, file, tmpoutfile, tmperrfile: {
+          'exitcode' : 0,
+          'stdout'   : "Simulated run of py %s" % file,
+          'stderr'   : 'None'
+        }
+        run_simulation()
+  else:
+    run_simulation()
+
+
+
+def run_simulation():
+  Controller.logger = MagicMock()
+  sendRequest_method = MagicMock()
+
+  tmpfile = tempfile.gettempdir()
+
+  config = AmbariConfig().getConfig()
+  config.set('agent', 'prefix', tmpfile)
+
+  scriptsDir = os.path.join(os.getcwd(), os.pardir,os.pardir,
+    os.pardir, 'main', 'upgrade_stack')
+  config.set('stack', 'upgradeScriptsDir', scriptsDir)
+
+  controller = Controller.Controller(config)
+  controller.sendRequest = sendRequest_method
+  controller.netutil.HEARTBEAT_IDDLE_INTERVAL_SEC = 0.1
+  controller.netutil.HEARTBEAT_NOT_IDDLE_INTERVAL_SEC = 0.1
+  controller.range = 1
+
+
+
+  for responce in responces:
+    queue.put(responce)
+
+  def send_stub(url, data):
+    logger.info("Controller sends data to %s :" % url)
+    logger.info(pprint.pformat(data))
+    if not queue.empty():
+      responce = queue.get()
+    else:
+      responce = responces[-1]
+      logger.info("There is no predefined responce available, sleeping for 30 sec")
+      time.sleep(30)
+    responce = json.loads(responce)
+    responseId.inc()
+    responce["responseId"] = responseId.val()
+    responce = json.dumps(responce)
+    logger.info("Returning data to Controller:" + responce)
+    return responce
+
+  sendRequest_method.side_effect = send_stub
+
+  logger.setLevel(logging.DEBUG)
+  formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - \
+        %(message)s")
+  stream_handler = logging.StreamHandler()
+  stream_handler.setFormatter(formatter)
+  logger.addHandler(stream_handler)
+  logger.info("Starting")
+
+  controller.start()
+  controller.actionQueue.IDLE_SLEEP_TIME = 0.1
+  controller.run()
+
+
+if __name__ == '__main__':
+#  s =   """
+#  {
+#    "responseId":"n",
+#    "restartAgent": "False",
+#    "executionCommands":
+#      [{
+#        "commandId": "31-1",
+#        "role" : "DATANODE",
+#        "taskId" : 2,
+#        "clusterName" : "clusterName",
+#        "serviceName" : "HDFS",
+#        "roleCommand" : "UPGRADE",
+#        "hostname" : "localhost.localdomain",
+#        "hostLevelParams": {},
+#        "clusterHostInfo": "clusterHostInfo",
+#        "configurations": {},
+#        "commandType": "EXECUTION_COMMAND",
+#        "configurations": {"global" : {}},
+#        "roleParams": {},
+#        "commandParams" :	{
+#          "commandParams": {"source_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.2.0\\"}", "target_stack_version": "{\\"stackName\\":\\"HDP\\",\\"stackVersion\\":\\"1.2.2\\"}"}
+#        },
+#        "clusterHostInfo": {
+#          "ambari_db_server_host": [
+#              "dev.hortonworks.com"
+#          ],
+#          "ganglia_server_host": [
+#              "dev.hortonworks.com"
+#          ],
+#          "nagios_server_host": [
+#              "dev.hortonworks.com"
+#          ],
+#          "namenode_host": [
+#              "dev.hortonworks.com"
+#          ],
+#          "slave_hosts": [
+#              "dev.hortonworks.com"
+#          ]
+#        }
+#      }],
+#    "statusCommands":[]
+#  }
+#  """
+#  t = json.loads(s)
+#  pprint.pprint(t)
+
+  main()
+
+
+

+ 45 - 44
ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java

@@ -17,6 +17,7 @@
  */
  */
 package org.apache.ambari.server.agent;
 package org.apache.ambari.server.agent;
 
 
+import com.google.gson.Gson;
 import com.google.inject.Inject;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.Injector;
 import com.google.inject.Singleton;
 import com.google.inject.Singleton;
@@ -66,7 +67,9 @@ public class HeartBeatHandler {
   @Inject
   @Inject
   ActionMetadata actionMetadata;
   ActionMetadata actionMetadata;
   @Inject
   @Inject
-  HBaseMasterPortScanner scaner;
+  HBaseMasterPortScanner scanner;
+  @Inject
+  private Gson gson;
 
 
   private Map<String, Long> hostResponseIds = new HashMap<String, Long>();
   private Map<String, Long> hostResponseIds = new HashMap<String, Long>();
   private Map<String, HeartBeatResponse> hostResponses = new HashMap<String, HeartBeatResponse>();
   private Map<String, HeartBeatResponse> hostResponses = new HashMap<String, HeartBeatResponse>();
@@ -78,7 +81,7 @@ public class HeartBeatHandler {
     this.actionQueue = aq;
     this.actionQueue = aq;
     this.actionManager = am;
     this.actionManager = am;
     this.heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000);
     this.heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000);
-    this.heartbeatMonitor.setScaner(scaner);
+    this.heartbeatMonitor.setScanner(scanner);
     injector.injectMembers(this);
     injector.injectMembers(this);
   }
   }
 
 
@@ -151,7 +154,7 @@ public class HeartBeatHandler {
         hostObject.handleEvent(new HostUnhealthyHeartbeatEvent(hostname, now,
         hostObject.handleEvent(new HostUnhealthyHeartbeatEvent(hostname, now,
             null));       
             null));       
       }
       }
-      if(hostState != hostObject.getState()) scaner.updateHBaseMaster(hostObject);
+      if(hostState != hostObject.getState()) scanner.updateHBaseMaster(hostObject);
     } catch (InvalidStateTransitionException ex) {
     } catch (InvalidStateTransitionException ex) {
       LOG.warn("Asking agent to reregister due to " + ex.getMessage(),  ex);
       LOG.warn("Asking agent to reregister due to " + ex.getMessage(),  ex);
       hostObject.setState(HostState.INIT);
       hostObject.setState(HostState.INIT);
@@ -205,7 +208,9 @@ public class HeartBeatHandler {
             scHost.handleEvent(new ServiceComponentHostOpInProgressEvent(schName,
             scHost.handleEvent(new ServiceComponentHostOpInProgressEvent(schName,
                     hostname, now));
                     hostname, now));
           }
           }
-          if(state != scHost.getState() && schName.equals(Role.HBASE_MASTER.toString())) scaner.updateHBaseMaster(cl);
+          if(state != scHost.getState() && schName.equals(Role.HBASE_MASTER.toString())) {
+            scanner.updateHBaseMaster(cl);
+          }
         } catch (ServiceComponentNotFoundException scnex) {
         } catch (ServiceComponentNotFoundException scnex) {
           LOG.warn("Service component not found ", scnex);
           LOG.warn("Service component not found ", scnex);
         } catch (InvalidStateTransitionException ex) {
         } catch (InvalidStateTransitionException ex) {
@@ -220,7 +225,7 @@ public class HeartBeatHandler {
   protected void processStatusReports(HeartBeat heartbeat,
   protected void processStatusReports(HeartBeat heartbeat,
                                       String hostname,
                                       String hostname,
                                       Clusters clusterFsm) throws
                                       Clusters clusterFsm) throws
-                                                            AmbariException {
+      AmbariException {
     Set<Cluster> clusters = clusterFsm.getClustersForHost(hostname);
     Set<Cluster> clusters = clusterFsm.getClustersForHost(hostname);
     for (Cluster cl : clusters) {
     for (Cluster cl : clusters) {
       for (ComponentStatus status : heartbeat.componentStatus) {
       for (ComponentStatus status : heartbeat.componentStatus) {
@@ -230,9 +235,9 @@ public class HeartBeatHandler {
             String componentName = status.getComponentName();
             String componentName = status.getComponentName();
             if (svc.getServiceComponents().containsKey(componentName)) {
             if (svc.getServiceComponents().containsKey(componentName)) {
               ServiceComponent svcComp = svc.getServiceComponent(
               ServiceComponent svcComp = svc.getServiceComponent(
-                      componentName);
+                  componentName);
               ServiceComponentHost scHost = svcComp.getServiceComponentHost(
               ServiceComponentHost scHost = svcComp.getServiceComponentHost(
-                      hostname);
+                  hostname);
               State prevState = scHost.getState();
               State prevState = scHost.getState();
               State liveState = State.valueOf(State.class, status.getStatus());
               State liveState = State.valueOf(State.class, status.getStatus());
               if (prevState.equals(State.INSTALLED)
               if (prevState.equals(State.INSTALLED)
@@ -242,62 +247,58 @@ public class HeartBeatHandler {
                   || prevState.equals(State.STOPPING)
                   || prevState.equals(State.STOPPING)
                   || prevState.equals(State.STOP_FAILED)) {
                   || prevState.equals(State.STOP_FAILED)) {
                 scHost.setState(liveState);
                 scHost.setState(liveState);
-                LOG.info("State of service component " + componentName
-                    + " of service " + status.getServiceName()
-                    + " of cluster " + status.getClusterName()
-                    + " has changed from " + prevState + " to " + liveState
-                    + " at host " + hostname);
-                if (!prevState.equals(liveState)
-                    && scHost.getServiceComponentName().equals(Role.HBASE_MASTER.toString())) {
-                    scaner.updateHBaseMaster(scHost);
+                if (!prevState.equals(liveState)) {
+                  LOG.info("State of service component " + componentName
+                      + " of service " + status.getServiceName()
+                      + " of cluster " + status.getClusterName()
+                      + " has changed from " + prevState + " to " + liveState
+                      + " at host " + hostname);
+                  if (scHost.getServiceComponentName().equals(Role.HBASE_MASTER.toString())) {
+                    scanner.updateHBaseMaster(scHost);
+                  }
                 }
                 }
               }
               }
 
 
-              if(null != status.getStackVersion() && !status.getStackVersion().isEmpty())
-              {
-                scHost.setStackVersion(new StackId(status.getStackVersion()));
+              if (null != status.getStackVersion() && !status.getStackVersion().isEmpty()) {
+                scHost.setStackVersion(gson.fromJson(status.getStackVersion(), StackId.class));
               }
               }
 
 
               // TODO need to get config version and stack version from live state
               // TODO need to get config version and stack version from live state
             } else {
             } else {
               // TODO: What should be done otherwise?
               // TODO: What should be done otherwise?
             }
             }
-          }
-          catch (ServiceNotFoundException e) {
+          } catch (ServiceNotFoundException e) {
             LOG.warn("Received a live status update for a non-initialized"
             LOG.warn("Received a live status update for a non-initialized"
-                    + " service"
-                    + ", clusterName=" + status.getClusterName()
-                    + ", serviceName=" + status.getServiceName());
+                + " service"
+                + ", clusterName=" + status.getClusterName()
+                + ", serviceName=" + status.getServiceName());
             // FIXME ignore invalid live update and continue for now?
             // FIXME ignore invalid live update and continue for now?
             continue;
             continue;
-          }
-          catch (ServiceComponentNotFoundException e) {
+          } catch (ServiceComponentNotFoundException e) {
             LOG.warn("Received a live status update for a non-initialized"
             LOG.warn("Received a live status update for a non-initialized"
-                    + " servicecomponent"
-                    + ", clusterName=" + status.getClusterName()
-                    + ", serviceName=" + status.getServiceName()
-                    + ", componentName=" + status.getComponentName());
+                + " servicecomponent"
+                + ", clusterName=" + status.getClusterName()
+                + ", serviceName=" + status.getServiceName()
+                + ", componentName=" + status.getComponentName());
             // FIXME ignore invalid live update and continue for now?
             // FIXME ignore invalid live update and continue for now?
             continue;
             continue;
-          }
-          catch (ServiceComponentHostNotFoundException e) {
+          } catch (ServiceComponentHostNotFoundException e) {
             LOG.warn("Received a live status update for a non-initialized"
             LOG.warn("Received a live status update for a non-initialized"
-                    + " service"
-                    + ", clusterName=" + status.getClusterName()
-                    + ", serviceName=" + status.getServiceName()
-                    + ", componentName=" + status.getComponentName()
-                    + ", hostname=" + hostname);
+                + " service"
+                + ", clusterName=" + status.getClusterName()
+                + ", serviceName=" + status.getServiceName()
+                + ", componentName=" + status.getComponentName()
+                + ", hostname=" + hostname);
             // FIXME ignore invalid live update and continue for now?
             // FIXME ignore invalid live update and continue for now?
             continue;
             continue;
-          }
-          catch (RuntimeException e) {
+          } catch (RuntimeException e) {
             LOG.warn("Received a live status with invalid payload"
             LOG.warn("Received a live status with invalid payload"
-                    + " service"
-                    + ", clusterName=" + status.getClusterName()
-                    + ", serviceName=" + status.getServiceName()
-                    + ", componentName=" + status.getComponentName()
-                    + ", hostname=" + hostname
-                    + ", error=" + e.getMessage());
+                + " service"
+                + ", clusterName=" + status.getClusterName()
+                + ", serviceName=" + status.getServiceName()
+                + ", componentName=" + status.getComponentName()
+                + ", hostname=" + hostname
+                + ", error=" + e.getMessage());
             continue;
             continue;
           }
           }
         }
         }

+ 4 - 4
ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java

@@ -47,10 +47,10 @@ public class HeartbeatMonitor implements Runnable {
   private final int threadWakeupInterval; //1 minute
   private final int threadWakeupInterval; //1 minute
   private volatile boolean shouldRun = true;
   private volatile boolean shouldRun = true;
   private Thread monitorThread = null;
   private Thread monitorThread = null;
-  private HBaseMasterPortScanner scaner;
+  private HBaseMasterPortScanner scanner;
 
 
-  public void setScaner(HBaseMasterPortScanner scaner) {
-        this.scaner = scaner;
+  public void setScanner(HBaseMasterPortScanner scanner) {
+        this.scanner = scanner;
   }
   }
 
 
   public HeartbeatMonitor(Clusters fsm, ActionQueue aq, ActionManager am,
   public HeartbeatMonitor(Clusters fsm, ActionQueue aq, ActionManager am,
@@ -116,7 +116,7 @@ public class HeartbeatMonitor implements Runnable {
         LOG.warn("Hearbeat lost from host "+host);
         LOG.warn("Hearbeat lost from host "+host);
         //Heartbeat is expired
         //Heartbeat is expired
         hostObj.handleEvent(new HostHeartbeatLostEvent(host));
         hostObj.handleEvent(new HostHeartbeatLostEvent(host));
-        if(hostState != hostObj.getState() && scaner != null) scaner.updateHBaseMaster(hostObj);
+        if(hostState != hostObj.getState() && scanner != null) scanner.updateHBaseMaster(hostObj);
         //Purge action queue
         //Purge action queue
         actionQueue.dequeueAll(host);
         actionQueue.dequeueAll(host);
         //notify action manager
         //notify action manager

+ 75 - 16
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java

@@ -30,7 +30,23 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeMap;
-import org.apache.ambari.server.*;
+
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ClusterNotFoundException;
+import org.apache.ambari.server.DuplicateResourceException;
+import org.apache.ambari.server.HostNotFoundException;
+import org.apache.ambari.server.ObjectNotFoundException;
+import org.apache.ambari.server.ParentObjectNotFoundException;
+import org.apache.ambari.server.Role;
+import org.apache.ambari.server.RoleCommand;
+import org.apache.ambari.server.ServiceComponentHostNotFoundException;
+import org.apache.ambari.server.ServiceComponentNotFoundException;
+import org.apache.ambari.server.ServiceNotFoundException;
+import org.apache.ambari.server.StackAccessException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.RequestStatus;
 import org.apache.ambari.server.actionmanager.RequestStatus;
@@ -45,19 +61,40 @@ import org.apache.ambari.server.security.authorization.User;
 import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.serveraction.ServerAction;
 import org.apache.ambari.server.stageplanner.RoleGraph;
 import org.apache.ambari.server.stageplanner.RoleGraph;
-import org.apache.ambari.server.state.*;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
+import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.OperatingSystemInfo;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.RepositoryInfo;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentFactory;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceComponentHostEvent;
+import org.apache.ambari.server.state.ServiceComponentHostFactory;
+import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
-import org.apache.ambari.server.state.svccomphost.*;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostMaintenanceEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostRestoreEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostUpgradeEvent;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.LoggerFactory;
 
 
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
-
 @Singleton
 @Singleton
 public class AmbariManagementControllerImpl implements
 public class AmbariManagementControllerImpl implements
     AmbariManagementController {
     AmbariManagementController {
@@ -103,7 +140,6 @@ public class AmbariManagementControllerImpl implements
   @Inject
   @Inject
   private Configuration configs;
   private Configuration configs;
 
 
-
   final private String masterHostname;
   final private String masterHostname;
 
 
   final private static String JDK_RESOURCE_LOCATION =
   final private static String JDK_RESOURCE_LOCATION =
@@ -1388,11 +1424,17 @@ public class AmbariManagementControllerImpl implements
     boolean requiresVersionUpdate = requestedVersionString != null
     boolean requiresVersionUpdate = requestedVersionString != null
         && !requestedVersionString.isEmpty();
         && !requestedVersionString.isEmpty();
     if (requiresVersionUpdate) {
     if (requiresVersionUpdate) {
+      LOG.info("Received a cluster update request"
+          + ", clusterName=" + request.getClusterName()
+          + ", request=" + request);
       requestedVersion = new StackId(requestedVersionString);
       requestedVersion = new StackId(requestedVersionString);
       if (!requestedVersion.getStackName().equals(currentVersion.getStackName())) {
       if (!requestedVersion.getStackName().equals(currentVersion.getStackName())) {
         throw new AmbariException("Upgrade not possible between different stacks.");
         throw new AmbariException("Upgrade not possible between different stacks.");
       }
       }
       requiresVersionUpdate = !currentVersion.equals(requestedVersion);
       requiresVersionUpdate = !currentVersion.equals(requestedVersion);
+      if(!requiresVersionUpdate) {
+        LOG.info("The cluster is already at " + currentVersion);
+      }
     }
     }
 
 
     if (requiresVersionUpdate && requiresHostListUpdate) {
     if (requiresVersionUpdate && requiresHostListUpdate) {
@@ -1407,6 +1449,7 @@ public class AmbariManagementControllerImpl implements
     }
     }
 
 
     if (requiresVersionUpdate) {
     if (requiresVersionUpdate) {
+      LOG.info("Upgrade cluster request received for stack " + requestedVersion);
       boolean retry = false;
       boolean retry = false;
       if (0 == currentVersion.compareTo(desiredVersion)) {
       if (0 == currentVersion.compareTo(desiredVersion)) {
         if (1 != requestedVersion.compareTo(currentVersion)) {
         if (1 != requestedVersion.compareTo(currentVersion)) {
@@ -1427,6 +1470,7 @@ public class AmbariManagementControllerImpl implements
         }
         }
       } else {
       } else {
         retry = true;
         retry = true;
+        LOG.info("Received upgrade request is a retry.");
         if (0 != requestedVersion.compareTo(desiredVersion)) {
         if (0 != requestedVersion.compareTo(desiredVersion)) {
           throw new AmbariException("Upgrade in progress to target version : "
           throw new AmbariException("Upgrade in progress to target version : "
               + desiredVersion
               + desiredVersion
@@ -1472,11 +1516,13 @@ public class AmbariManagementControllerImpl implements
       Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts =
       Map<String, Map<State, List<ServiceComponentHost>>> changedScHosts =
           new HashMap<String, Map<State, List<ServiceComponentHost>>>();
           new HashMap<String, Map<State, List<ServiceComponentHost>>>();
 
 
+      LOG.info("Identifying components to upgrade.");
       fillComponentsToUpgrade(request, cluster, changedServices, changedComps, changedScHosts);
       fillComponentsToUpgrade(request, cluster, changedServices, changedComps, changedScHosts);
       Map<String, String> requestParameters = new HashMap<String, String>();
       Map<String, String> requestParameters = new HashMap<String, String>();
       requestParameters.put(Configuration.UPGRADE_TO_STACK, gson.toJson(requestedVersion));
       requestParameters.put(Configuration.UPGRADE_TO_STACK, gson.toJson(requestedVersion));
       requestParameters.put(Configuration.UPGRADE_FROM_STACK, gson.toJson(currentVersion));
       requestParameters.put(Configuration.UPGRADE_FROM_STACK, gson.toJson(currentVersion));
 
 
+      LOG.info("Creating stages for upgrade.");
       List<Stage> stages = doStageCreation(cluster, changedServices,
       List<Stage> stages = doStageCreation(cluster, changedServices,
           changedComps, changedScHosts, requestParameters);
           changedComps, changedScHosts, requestParameters);
 
 
@@ -1487,7 +1533,9 @@ public class AmbariManagementControllerImpl implements
       addFinalizeUpgradeAction(cluster, stages);
       addFinalizeUpgradeAction(cluster, stages);
       persistStages(stages);
       persistStages(stages);
       updateServiceStates(changedServices, changedComps, changedScHosts);
       updateServiceStates(changedServices, changedComps, changedScHosts);
-      return getRequestStatusResponse(stages.get(0).getRequestId());
+      long requestId = stages.get(0).getRequestId();
+      LOG.info(stages.size() + " stages created for upgrade and the request id is " + requestId);
+      return getRequestStatusResponse(requestId);
     }
     }
 
 
     return null;
     return null;
@@ -2895,6 +2943,9 @@ public class AmbariManagementControllerImpl implements
         }
         }
       }
       }
 
 
+      // If upgrade request comes without state information then its an error
+      boolean upgradeRequest = checkIfUpgradeRequestAndValidate(request, cluster, s, sc, sch);
+
       if (newState == null) {
       if (newState == null) {
         if (LOG.isDebugEnabled()) {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Nothing to do for new updateServiceComponentHost request"
           LOG.debug("Nothing to do for new updateServiceComponentHost request"
@@ -2915,8 +2966,6 @@ public class AmbariManagementControllerImpl implements
 
 
       seenNewStates.add(newState);
       seenNewStates.add(newState);
 
 
-      boolean upgradeRequest = checkIfUpgradeRequestAndValidate(request, cluster, s, sc, sch);
-
       if (!processingUpgradeRequest && upgradeRequest) {
       if (!processingUpgradeRequest && upgradeRequest) {
         processingUpgradeRequest = true;
         processingUpgradeRequest = true;
         // this needs to be the first request
         // this needs to be the first request
@@ -3185,19 +3234,29 @@ public class AmbariManagementControllerImpl implements
           throw getHostComponentUpgradeException(request, cluster, s, sc, sch,
           throw getHostComponentUpgradeException(request, cluster, s, sc, sch,
               "Upgrade cannot be accompanied with config modification");
               "Upgrade cannot be accompanied with config modification");
         }
         }
-        if (!request.getDesiredState().equals(State.INSTALLED.toString())) {
+        if (request.getDesiredState() == null
+            || !request.getDesiredState().equals(State.INSTALLED.toString())) {
           throw getHostComponentUpgradeException(request, cluster, s, sc, sch,
           throw getHostComponentUpgradeException(request, cluster, s, sc, sch,
               "The desired state for an upgrade request must be " + State.INSTALLED);
               "The desired state for an upgrade request must be " + State.INSTALLED);
         }
         }
+        LOG.info("Received upgrade request to " + requestedStackId + " for "
+            + "component " + sch.getServiceComponentName()
+            + " on " + sch.getHostName());
+      } else {
+        LOG.info("Stack id " + requestedStackId + " provided in the request matches"
+            + " the current stack id of the "
+            + "component " + sch.getServiceComponentName()
+            + " on " + sch.getHostName() + ". It will not be upgraded.");
       }
       }
     }
     }
 
 
     return isUpgradeRequest;
     return isUpgradeRequest;
   }
   }
 
 
-  private AmbariException getHostComponentUpgradeException(ServiceComponentHostRequest request, Cluster cluster,
-                                                           Service s, ServiceComponent sc, ServiceComponentHost sch,
-                                                           String message) throws AmbariException {
+  private AmbariException getHostComponentUpgradeException(
+      ServiceComponentHostRequest request, Cluster cluster,
+      Service s, ServiceComponent sc, ServiceComponentHost sch,
+      String message) throws AmbariException {
     return new AmbariException(message
     return new AmbariException(message
         + ", clusterName=" + cluster.getClusterName()
         + ", clusterName=" + cluster.getClusterName()
         + ", clusterId=" + cluster.getClusterId()
         + ", clusterId=" + cluster.getClusterId()

+ 8 - 5
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java

@@ -140,11 +140,14 @@ class ClusterResourceProvider extends AbstractResourceProvider {
   public RequestStatus updateResources(Request request, Predicate predicate)
   public RequestStatus updateResources(Request request, Predicate predicate)
       throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
       throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
 
 
-    
-    for (Map<String, Object> propertyMap : getPropertyMaps(request.getProperties().iterator().next(), predicate)) {
+    RequestStatusResponse response = null;
+    Set<Map<String, Object>> propertyMaps = getPropertyMaps(request.getProperties().iterator().next(), predicate);
+    if (propertyMaps.size() > 1) {
+      throw new SystemException("Single update request cannot modify multiple clusters.", null);
+    }
+    for (Map<String, Object> propertyMap : propertyMaps) {
       final ClusterRequest clusterRequest = getRequest(propertyMap);
       final ClusterRequest clusterRequest = getRequest(propertyMap);
-
-      modifyResources(new Command<RequestStatusResponse>() {
+      response = modifyResources(new Command<RequestStatusResponse>() {
         @Override
         @Override
         public RequestStatusResponse invoke() throws AmbariException {
         public RequestStatusResponse invoke() throws AmbariException {
           return getManagementController().updateCluster(clusterRequest);
           return getManagementController().updateCluster(clusterRequest);
@@ -152,7 +155,7 @@ class ClusterResourceProvider extends AbstractResourceProvider {
       });
       });
     }
     }
     notifyUpdate(Resource.Type.Cluster, request, predicate);
     notifyUpdate(Resource.Type.Cluster, request, predicate);
-    return getRequestStatus(null);
+    return getRequestStatus(response);
   }
   }
 
 
   @Override
   @Override

+ 4 - 4
ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java

@@ -737,12 +737,12 @@ public class TestHeartbeatHandler {
     hb.setReports(new ArrayList<CommandReport>());
     hb.setReports(new ArrayList<CommandReport>());
 
 
     ArrayList<ComponentStatus> componentStatuses = new ArrayList<ComponentStatus>();
     ArrayList<ComponentStatus> componentStatuses = new ArrayList<ComponentStatus>();
-    ComponentStatus componentStatus1 =
-        createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.STARTED, DATANODE, "HDP-1.3.0");
+    ComponentStatus componentStatus1 = createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.STARTED,
+        DATANODE, "{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}");
     ComponentStatus componentStatus2 =
     ComponentStatus componentStatus2 =
         createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.STARTED, NAMENODE, "");
         createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.STARTED, NAMENODE, "");
-    ComponentStatus componentStatus3 =
-        createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.INSTALLED, HDFS_CLIENT, "HDP-1.3.0");
+    ComponentStatus componentStatus3 = createComponentStatus(DummyCluster, HDFS, DummyHostStatus, State.INSTALLED,
+        HDFS_CLIENT, "{\"stackName\":\"HDP\",\"stackVersion\":\"1.3.0\"}");
 
 
     componentStatuses.add(componentStatus1);
     componentStatuses.add(componentStatus1);
     componentStatuses.add(componentStatus2);
     componentStatuses.add(componentStatus2);

+ 23 - 1
ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java

@@ -3156,6 +3156,15 @@ public class AmbariManagementControllerTest {
     reqs.add(req1);
     reqs.add(req1);
     updateHostAndCompareExpectedFailure(reqs, "The desired state for an upgrade request must be");
     updateHostAndCompareExpectedFailure(reqs, "The desired state for an upgrade request must be");
 
 
+    c1.setCurrentStackVersion(new StackId("HDP-0.2"));
+    sch1.setState(State.UPGRADING);
+    reqs.clear();
+    req1 = new ServiceComponentHostRequest(clusterName, serviceName1,
+        componentName1, host1, null, null);
+    req1.setDesiredStackId("HDP-0.2");
+    reqs.add(req1);
+    updateHostAndCompareExpectedFailure(reqs, "The desired state for an upgrade request must be");
+
     c1.setCurrentStackVersion(new StackId("HDP-0.2"));
     c1.setCurrentStackVersion(new StackId("HDP-0.2"));
     sch1.setState(State.INSTALLED);
     sch1.setState(State.INSTALLED);
     sch1.setDesiredState(State.INSTALLED);
     sch1.setDesiredState(State.INSTALLED);
@@ -3174,7 +3183,8 @@ public class AmbariManagementControllerTest {
     updateHostAndCompareExpectedFailure(reqs, "An upgrade request cannot be combined with other");
     updateHostAndCompareExpectedFailure(reqs, "An upgrade request cannot be combined with other");
 
 
     c1.setCurrentStackVersion(new StackId("HDP-0.2"));
     c1.setCurrentStackVersion(new StackId("HDP-0.2"));
-    sch1.setState(State.UPGRADING);
+    sch1.setState(State.INSTALLED);
+    sch1.setStackVersion(new StackId("HDP-0.2"));
     reqs.clear();
     reqs.clear();
     req1 = new ServiceComponentHostRequest(clusterName, serviceName1,
     req1 = new ServiceComponentHostRequest(clusterName, serviceName1,
         componentName1, host1,
         componentName1, host1,
@@ -3183,6 +3193,18 @@ public class AmbariManagementControllerTest {
     reqs.add(req1);
     reqs.add(req1);
     RequestStatusResponse resp = controller.updateHostComponents(reqs);
     RequestStatusResponse resp = controller.updateHostComponents(reqs);
     Assert.assertNull(resp);
     Assert.assertNull(resp);
+
+    c1.setCurrentStackVersion(new StackId("HDP-0.2"));
+    sch1.setState(State.INSTALLED);
+    sch1.setStackVersion(new StackId("HDP-0.2"));
+    reqs.clear();
+    req1 = new ServiceComponentHostRequest(clusterName, serviceName1,
+        componentName1, host1,
+        null, State.INSTALLED.toString());
+    req1.setDesiredStackId("HDP-0.2");
+    reqs.add(req1);
+    resp = controller.updateHostComponents(reqs);
+    Assert.assertNull(resp);
   }
   }
 
 
   private void updateHostAndCompareExpectedFailure(Set<ServiceComponentHostRequest> reqs,
   private void updateHostAndCompareExpectedFailure(Set<ServiceComponentHostRequest> reqs,