瀏覽代碼

AMBARI-967. Enhance predicate comparison. (Tom Beerbower via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/incubator/ambari/branches/AMBARI-666@1406489 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 12 年之前
父節點
當前提交
a12bb10131
共有 100 個文件被更改,包括 4163 次插入和 1288 次刪除
  1. 2 0
      AMBARI-666-CHANGES.txt
  2. 26 0
      ambari-agent/conf/unix/ambari-agent
  3. 28 0
      ambari-agent/conf/unix/ambari.ini
  4. 69 0
      ambari-agent/pom.xml
  5. 48 48
      ambari-agent/src/main/puppet/manifestloader/site.pp
  6. 23 23
      ambari-agent/src/main/puppet/modules/configgenerator/manifests/init.pp
  7. 1 1
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/client.pp
  8. 18 18
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp
  9. 1 1
      ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
  10. 108 105
      ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp
  11. 3 3
      ambari-agent/src/main/puppet/modules/hdp-hive/manifests/init.pp
  12. 3 3
      ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/init.pp
  13. 3 3
      ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/init.pp
  14. 37 37
      ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_calc_xmn_from_xms.rb
  15. 5 5
      ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
  16. 1 0
      ambari-agent/src/main/python/ambari_agent/ActionQueue.py
  17. 7 2
      ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
  18. 28 12
      ambari-agent/src/main/python/ambari_agent/Controller.py
  19. 37 3
      ambari-agent/src/main/python/ambari_agent/Heartbeat.py
  20. 50 0
      ambari-agent/src/main/python/ambari_agent/NetUtil.py
  21. 18 17
      ambari-agent/src/main/python/ambari_agent/StatusCheck.py
  22. 14 5
      ambari-agent/src/main/python/ambari_agent/main.py
  23. 267 236
      ambari-agent/src/main/python/ambari_agent/manifestGenerator.py
  24. 10 6
      ambari-agent/src/main/python/ambari_agent/puppetExecutor.py
  25. 17 5
      ambari-agent/src/main/python/ambari_agent/rolesToClass.dict
  26. 3 2
      ambari-agent/src/main/python/ambari_agent/security.py
  27. 2 1
      ambari-agent/src/main/python/ambari_agent/serviceStates.dict
  28. 11 11
      ambari-agent/src/main/python/ambari_agent/test.json
  29. 3 0
      ambari-agent/src/test/python/TestCertGeneration.py
  30. 115 0
      ambari-agent/src/test/python/TestConnectionRetries.py
  31. 7 3
      ambari-agent/src/test/python/TestHeartbeat.py
  32. 95 0
      ambari-agent/src/test/python/TestStatusCheck.py
  33. 37 0
      ambari-server/conf/unix/ambari-server
  34. 3 0
      ambari-server/conf/unix/ambari.properties
  35. 36 0
      ambari-server/conf/unix/log4j.properties
  36. 153 4
      ambari-server/pom.xml
  37. 5 5
      ambari-server/src/main/assemblies/server.xml
  38. 2 2
      ambari-server/src/main/java/org/apache/ambari/server/HostNotFoundException.java
  39. 31 0
      ambari-server/src/main/java/org/apache/ambari/server/StackNotFoundException.java
  40. 11 0
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java
  41. 65 8
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
  42. 49 2
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBInMemoryImpl.java
  43. 51 0
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java
  44. 16 4
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
  45. 0 1
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostAction.java
  46. 14 3
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java
  47. 40 21
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
  48. 3 0
      ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java
  49. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/agent/CommandReport.java
  50. 4 3
      ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
  51. 7 9
      ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
  52. 54 42
      ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java
  53. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java
  54. 59 3
      ambari-server/src/main/java/org/apache/ambari/server/api/handlers/BaseManagementHandler.java
  55. 2 2
      ambari-server/src/main/java/org/apache/ambari/server/api/handlers/RequestHandlerFactory.java
  56. 23 8
      ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java
  57. 55 0
      ambari-server/src/main/java/org/apache/ambari/server/api/resources/ActionResourceDefinition.java
  58. 11 10
      ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java
  59. 15 13
      ambari-server/src/main/java/org/apache/ambari/server/api/resources/ConfigurationResourceDefinition.java
  60. 25 8
      ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostResourceDefinition.java
  61. 73 0
      ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java
  62. 71 0
      ambari-server/src/main/java/org/apache/ambari/server/api/resources/TaskResourceDefinition.java
  63. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/api/rest/HealthCheck.java
  64. 124 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ActionService.java
  65. 426 89
      ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
  66. 116 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaService.java
  67. 5 4
      ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseService.java
  68. 14 6
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ClusterService.java
  69. 25 7
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ComponentService.java
  70. 6 4
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ConfigurationService.java
  71. 3 3
      ambari-server/src/main/java/org/apache/ambari/server/api/services/CreatePersistenceManager.java
  72. 5 2
      ambari-server/src/main/java/org/apache/ambari/server/api/services/DeletePersistenceManager.java
  73. 25 7
      ambari-server/src/main/java/org/apache/ambari/server/api/services/HostComponentService.java
  74. 45 11
      ambari-server/src/main/java/org/apache/ambari/server/api/services/HostService.java
  75. 49 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueImpl.java
  76. 87 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java
  77. 4 1
      ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistenceManager.java
  78. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/api/services/Request.java
  79. 39 15
      ambari-server/src/main/java/org/apache/ambari/server/api/services/RequestImpl.java
  80. 109 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/RequestService.java
  81. 17 3
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ResponseFactory.java
  82. 8 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/Result.java
  83. 13 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ResultImpl.java
  84. 53 8
      ambari-server/src/main/java/org/apache/ambari/server/api/services/ServiceService.java
  85. 106 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/TaskService.java
  86. 4 3
      ambari-server/src/main/java/org/apache/ambari/server/api/services/UpdatePersistenceManager.java
  87. 0 1
      ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonPropertyParser.java
  88. 6 6
      ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
  89. 3 3
      ambari-server/src/main/java/org/apache/ambari/server/bootstrap/SshHostInfo.java
  90. 21 8
      ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
  91. 70 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java
  92. 51 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/ActionResponse.java
  93. 61 52
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
  94. 463 332
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  95. 62 8
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
  96. 12 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
  97. 44 11
      ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
  98. 0 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java
  99. 37 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/RequestStatusRequest.java
  100. 71 0
      ambari-server/src/main/java/org/apache/ambari/server/controller/RequestStatusResponse.java

+ 2 - 0
AMBARI-666-CHANGES.txt

@@ -12,6 +12,8 @@ AMBARI-666 branch (unreleased changes)
 
   NEW FEATURES
 
+  AMBARI-967. Enhance predicate comparison. (Tom Beerbower via mahadev)
+
   AMBARI-954. Support installation of Ganglia master and slaves via
   Ambari Web. (yusaku)
 

+ 26 - 0
ambari-agent/conf/unix/ambari-agent

@@ -0,0 +1,26 @@
+# description: ambari-agent daemon
+# processname: ambari-agent
+
+# /etc/init.d/ambari-agent
+
+case "$1" in
+  start)
+        echo -e "Starting ambari-agent"
+        export AMBARI_PASSPHRASE=pass_phrase
+        python /usr/lib/python2.6/site-packages/ambari_agent/main.py
+        ;;
+  stop)
+        echo -e "Stopping ambari-agent"
+        python /usr/lib/python2.6/site-packages/ambari_agent/main.py stop
+        ;;
+  restart)
+        echo -e "Restarting ambari-agent"
+        $0 stop
+        $0 start
+        ;;     
+  *)
+        echo "Usage: /usr/sbin/ambari-agent {start|stop|restart}"
+        exit 1
+esac
+
+exit 0

+ 28 - 0
ambari-agent/conf/unix/ambari.ini

@@ -0,0 +1,28 @@
+[server]
+hostname=ambari-rpm.cybervisiontech.com.ua
+url_port=4080
+secured_url_port=8443
+
+[agent]
+prefix=/tmp/ambari-agent
+
+[stack]
+installprefix=/var/ambari/
+
+[puppet]
+puppetmodules=/etc/ambari-agent/puppet
+puppet_home=/usr/bin/puppet
+facter_home=/usr/bin/facter
+
+[command]
+maxretries=2
+sleepBetweenRetries=1
+
+[security]
+keysdir=/etc/ambari-agent/keys
+server_crt=ca.crt
+passphrase_env_var_name=AMBARI_PASSPHRASE
+
+[services]
+serviceToPidMapFile=/usr/lib/python2.6/site-packages/ambari_agent/servicesToPidNames.dict
+pidLookupPath=/var/run/

+ 69 - 0
ambari-agent/pom.xml

@@ -96,6 +96,75 @@
           </execution>
         </executions>
       </plugin>
+
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>rpm-maven-plugin</artifactId>
+        <version>2.0.1</version>
+        <executions>
+          <execution>
+            <phase>none</phase>
+            <goals>
+              <goal>rpm</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <copyright>2012, Apache Software Foundation</copyright>
+          <group>Development</group>
+          <description>Maven Recipe: RPM Package.</description>
+          <mappings>
+
+            <mapping>
+              <directory>/usr/lib/python2.6/site-packages/ambari_agent</directory>
+              <sources>
+                <source>
+                  <location>${project.build.directory}/${project.artifactId}-${project.version}/ambari_agent</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/etc/${project.artifactId}/puppet</directory>
+              <sources>
+                <source>
+                  <location>src/main/puppet</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/etc/ambari</directory>
+              <sources>
+                <source>
+                  <location>conf/unix/ambari.ini</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/usr/sbin</directory>
+              <filemode>744</filemode>
+              <sources>
+                <source>
+                  <location>conf/unix/ambari-agent</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/var/run/ambari</directory>
+            </mapping>
+
+            <mapping>
+              <directory>/etc/ambari-agent/keys</directory>
+            </mapping>
+
+            <!-- -->
+          </mappings>
+        </configuration>
+      </plugin>
+
     </plugins>
     <extensions>
       <extension>

+ 48 - 48
ambari-agent/src/main/puppet/manifestloader/site.pp

@@ -1,48 +1,48 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-class manifestloader () {
-    file { '/etc/puppet/agent/modules.tgz':
-      ensure => present,
-      source => "puppet:///modules/catalog/modules.tgz",  
-      mode => '0755',
-    }
-
-    exec { 'untar_modules':
-      command => "rm -rf /etc/puppet/agent/modules ; tar zxf /etc/puppet/agent/modules.tgz -C /etc/puppet/agent/ --strip-components 3",
-      path    => '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
-    } 
-
-    exec { 'puppet_apply':
-      command   => "sh /etc/puppet/agent/modules/puppetApply.sh",
-      timeout   => 1800,
-      path      => '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-      logoutput => "true"
-    }
-
-    File['/etc/puppet/agent/modules.tgz'] -> Exec['untar_modules'] -> Exec['puppet_apply']
-}
-
-node default {
- stage{1 :}
- class {'manifestloader': stage => 1}
-}
-
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+class manifestloader () {
+    file { '/etc/puppet/agent/modules.tgz':
+      ensure => present,
+      source => "puppet:///modules/catalog/modules.tgz",  
+      mode => '0755',
+    }
+
+    exec { 'untar_modules':
+      command => "rm -rf /etc/puppet/agent/modules ; tar zxf /etc/puppet/agent/modules.tgz -C /etc/puppet/agent/ --strip-components 3",
+      path    => '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'
+    } 
+
+    exec { 'puppet_apply':
+      command   => "sh /etc/puppet/agent/modules/puppetApply.sh",
+      timeout   => 1800,
+      path      => '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+      logoutput => "true"
+    }
+
+    File['/etc/puppet/agent/modules.tgz'] -> Exec['untar_modules'] -> Exec['puppet_apply']
+}
+
+node default {
+ stage{1 :}
+ class {'manifestloader': stage => 1}
+}
+

+ 23 - 23
ambari-agent/src/main/puppet/modules/configgenerator/manifests/init.pp

@@ -1,23 +1,23 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-class configgenerator() {
-}
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+class configgenerator() {
+}

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/client.pp

@@ -20,7 +20,7 @@
 #
 class hdp-hadoop::client(
   $service_state = $hdp::params::cluster_client_state
-) inherits hdp::params
+) inherits hdp-hadoop::params
 {
   $hdp::params::service_exists['hdp-hadoop::client'] = true
 

+ 18 - 18
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/init.pp

@@ -43,57 +43,57 @@ class hdp-hadoop::initialize()
 debug('##Configs generation for hdp-hadoop')
 
 
-  if has_key($configuration, 'mapred_queue_acls') {
-    configgenerator::configfile{'mapred_queue_acls': 
+  if has_key($configuration, 'mapred-queue-acls') {
+    configgenerator::configfile{'mapred-queue-acls': 
       modulespath => $hdp-hadoop::params::conf_dir,
       filename => 'mapred-queue-acls.xml',
       module => 'hdp-hadoop',
-      configuration => $configuration['mapred_queue_acls']
+      configuration => $configuration['mapred-queue-acls']
     }
   }
   
-  if has_key($configuration, 'hadoop_policy') {
-    configgenerator::configfile{'hadoop_policy': 
+  if has_key($configuration, 'hadoop-policy') {
+    configgenerator::configfile{'hadoop-policy': 
       modulespath => $hdp-hadoop::params::conf_dir,
       filename => 'hadoop-policy.xml',
       module => 'hdp-hadoop',
-      configuration => $configuration['hadoop_policy']
+      configuration => $configuration['hadoop-policy']
     }
   }
 
-  if has_key($configuration, 'core_site') {
-      configgenerator::configfile{'core_site': 
+  if has_key($configuration, 'core-site') {
+      configgenerator::configfile{'core-site': 
         modulespath => $hdp-hadoop::params::conf_dir,
         filename => 'core-site.xml',
         module => 'hdp-hadoop',
-        configuration => $configuration['core_site']
+        configuration => $configuration['core-site']
       }
     }
 
-  if has_key($configuration, 'mapred_site') {
-    configgenerator::configfile{'mapred_site': 
+  if has_key($configuration, 'mapred-site') {
+    configgenerator::configfile{'mapred-site': 
       modulespath => $hdp-hadoop::params::conf_dir,
       filename => 'mapred-site.xml',
       module => 'hdp-hadoop',
-      configuration => $configuration['mapred_site']
+      configuration => $configuration['mapred-site']
     }
   }
   
-  if has_key($configuration, 'capacity_scheduler') {
-    configgenerator::configfile{'capacity_scheduler': 
+  if has_key($configuration, 'capacity-scheduler') {
+    configgenerator::configfile{'capacity-scheduler': 
       modulespath => $hdp-hadoop::params::conf_dir,
       filename => 'capacity-scheduler.xml',
       module => 'hdp-hadoop',
-      configuration => $configuration['capacity_scheduler']
+      configuration => $configuration['capacity-scheduler']
     }
   }
 
-  if has_key($configuration, 'hdfs_site') {
-    configgenerator::configfile{'hdfs_site': 
+  if has_key($configuration, 'hdfs-site') {
+    configgenerator::configfile{'hdfs-site': 
       modulespath => $hdp-hadoop::params::conf_dir,
       filename => 'hdfs-site.xml',
       module => 'hdp-hadoop',
-      configuration => $configuration['hdfs_site']
+      configuration => $configuration['hdfs-site']
     }
   }
 }

+ 1 - 1
ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp

@@ -95,7 +95,7 @@ class hdp-hadoop::params(
   }
 
   ### core-site
-  $fs_checkpoint_dir = hdp_default("hadoop/core-site/fs_checkpoint_dir","/tmp/dfs/namesecondary")
+  $fs_checkpoint_dir = hdp_default("hadoop/core-site/fs_checkpoint_dir","/tmp/hadoop-hdfs/dfs/namesecondary")
 
   $proxyuser_group = hdp_default("hadoop/core-site/proxyuser_group","users")
 

+ 108 - 105
ambari-agent/src/main/puppet/modules/hdp-hbase/manifests/init.pp

@@ -1,110 +1,113 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-class hdp-hbase(
-  $type,
-  $service_state) 
-{
-  include hdp-hbase::params
- 
-  $hbase_user = $hdp-hbase::params::hbase_user
-  $config_dir = $hdp-hbase::params::conf_dir
-  
-  $hdp::params::component_exists['hdp-hbase'] = true
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-hbase(
+  $type,
+  $service_state) 
+{
+  include hdp-hbase::params
+ 
+  $hbase_user = $hdp-hbase::params::hbase_user
+  $config_dir = $hdp-hbase::params::conf_dir
+  
+  $hdp::params::component_exists['hdp-hbase'] = true
+
 
-
   #Configs generation  
 
-  if has_key($configuration, 'hbase_site') {
-    configgenerator::configfile{'hbase_site': 
-      modulespath => $hdp-hbase::params::conf_dir,
-      filename => 'hbase-site.xml',
-      module => 'hdp-hbase',
-      configuration => $configuration['hbase_site']
-      }
+  if has_key($configuration, 'hbase-site') {
+    configgenerator::configfile{'hbase-site': 
+      modulespath => $hdp-hbase::params::conf_dir,
+      filename => 'hbase-site.xml',
+      module => 'hdp-hbase',
+      configuration => $configuration['hbase-site']
+      }
+    }
+    hdp-hbase::configfile { 'regionservers':}
+    Anchor['hdp-hbase::begin'] -> Hdp::Package['hbase'] -> Hdp::User[$hbase_user] -> Hdp::Directory[$config_dir] -> 
+    Hdp-hbase::Configfile<||> ->  Anchor['hdp-hbase::end']
+
+  if has_key($configuration, 'hbase-policy') {
+    configgenerator::configfile{'hbase-policy': 
+      modulespath => $hdp-hbase::params::conf_dir,
+      filename => 'hbase-policy.xml',
+      module => 'hdp-hbase',
+      configuration => $configuration['hbase-policy']
+      }
+    }
+
+  anchor{'hdp-hbase::begin':}
+  anchor{'hdp-hbase::end':}
+
+  if ($service_state == 'uninstalled') {
+    hdp::package { 'hbase':
+      ensure => 'uninstalled'
+    }
+    hdp::directory { $config_dir:
+      service_state => $service_state,
+      force => true
+    }
+
+    Anchor['hdp-hbase::begin'] -> Hdp::Package['hbase'] -> Hdp::Directory[$config_dir] -> Anchor['hdp-hbase::end']
+
+  } else {  
+    hdp::package { 'hbase': }
+  
+    hdp::user{ $hbase_user:}
+ 
+    hdp::directory { $config_dir: 
+      service_state => $service_state,
+      force => true
+    }
+
+   hdp-hbase::configfile { ['hbase-env.sh','log4j.properties','hadoop-metrics.properties']: 
+      type => $type
     }
+    hdp-hbase::configfile { 'regionservers':}
+    Anchor['hdp-hbase::begin'] -> Hdp::Package['hbase'] -> Hdp::User[$hbase_user] -> Hdp::Directory[$config_dir] -> 
+    Hdp-hbase::Configfile<||> ->  Anchor['hdp-hbase::end']
+  }
+}
 
-  if has_key($configuration, 'hbase_policy') {
-    configgenerator::configfile{'hbase_policy': 
-      modulespath => $hdp-hbase::params::conf_dir,
-      filename => 'hbase-policy.xml',
-      module => 'hdp-hbase',
-      configuration => $configuration['hbase_policy']
-      }
-    }
-
-  anchor{'hdp-hbase::begin':}
-  anchor{'hdp-hbase::end':}
-
-  if ($service_state == 'uninstalled') {
-    hdp::package { 'hbase':
-      ensure => 'uninstalled'
-    }
-    hdp::directory { $config_dir:
-      service_state => $service_state,
-      force => true
-    }
-
-    Anchor['hdp-hbase::begin'] -> Hdp::Package['hbase'] -> Hdp::Directory[$config_dir] -> Anchor['hdp-hbase::end']
-
-  } else {  
-    hdp::package { 'hbase': }
-  
-    hdp::user{ $hbase_user:}
- 
-    hdp::directory { $config_dir: 
-      service_state => $service_state,
-      force => true
-    }
-
-   hdp-hbase::configfile { ['hbase-env.sh','log4j.properties','hadoop-metrics.properties']: 
-      type => $type
-    }
-    hdp-hbase::configfile { 'regionservers':}
-    Anchor['hdp-hbase::begin'] -> Hdp::Package['hbase'] -> Hdp::User[$hbase_user] -> Hdp::Directory[$config_dir] -> 
-    Hdp-hbase::Configfile<||> ->  Anchor['hdp-hbase::end']
-  }
-}
-
-### config files
-define hdp-hbase::configfile(
-  $mode = undef,
-  $hbase_master_host = undef,
-  $template_tag = undef,
-  $type = undef,
-) 
-{
-  if ($name == 'hadoop-metrics.properties') {
-    if ($type == 'master') {
-    $tag = GANGLIA-MASTER
-  } else {
-     $tag = GANGLIA-RS
-  }
-   } else {
-    $tag = $template_tag
-}
-  hdp::configfile { "${hdp-hbase::params::conf_dir}/${name}":
-    component         => 'hbase',
-    owner             => $hdp-hbase::params::hbase_user,
-    mode              => $mode,
-    hbase_master_host => $hbase_master_host,
-    template_tag      => $tag
-  }
-}
+### config files
+define hdp-hbase::configfile(
+  $mode = undef,
+  $hbase_master_host = undef,
+  $template_tag = undef,
+  $type = undef,
+) 
+{
+  if ($name == 'hadoop-metrics.properties') {
+    if ($type == 'master') {
+    $tag = GANGLIA-MASTER
+  } else {
+     $tag = GANGLIA-RS
+  }
+   } else {
+    $tag = $template_tag
+}
+  hdp::configfile { "${hdp-hbase::params::conf_dir}/${name}":
+    component         => 'hbase',
+    owner             => $hdp-hbase::params::hbase_user,
+    mode              => $mode,
+    hbase_master_host => $hbase_master_host,
+    template_tag      => $tag
+  }
+}

+ 3 - 3
ambari-agent/src/main/puppet/modules/hdp-hive/manifests/init.pp

@@ -30,12 +30,12 @@ class hdp-hive(
 
 # Configs generation  
 
-  if has_key($configuration, 'hive_site') {
-    configgenerator::configfile{'hive_site':
+  if has_key($configuration, 'hive-site') {
+    configgenerator::configfile{'hive-site':
       modulespath => $hdp-hive::params::hive_conf_dir, 
       filename => 'hive-site.xml',
       module => 'hdp-hive',
-      configuration => $configuration['hive_site']
+      configuration => $configuration['hive-site']
     }
   }
 

+ 3 - 3
ambari-agent/src/main/puppet/modules/hdp-oozie/manifests/init.pp

@@ -28,12 +28,12 @@ class hdp-oozie(
 
 # Configs generation  
 
-  if has_key($configuration, 'oozie_site') {
-    configgenerator::configfile{'oozie_site':
+  if has_key($configuration, 'oozie-site') {
+    configgenerator::configfile{'oozie-site':
       modulespath => $hdp-oozie::params::conf_dir, 
       filename => 'oozie-site.xml',
       module => 'hdp-oozie',
-      configuration => $configuration['oozie_site']
+      configuration => $configuration['oozie-site']
     }
   }
 

+ 3 - 3
ambari-agent/src/main/puppet/modules/hdp-templeton/manifests/init.pp

@@ -25,12 +25,12 @@ class hdp-templeton(
 {
 # Configs generation  
 
-  if has_key($configuration, 'templeton_site') {
-    configgenerator::configfile{'templeton_site': 
+  if has_key($configuration, 'templeton-site') {
+    configgenerator::configfile{'templeton-site': 
       modulespath => $hdp-templeton::params::conf_dir,
       filename => 'templeton-site.xml',
       module => 'hdp-templeton',
-      configuration => $configuration['templeton_site']
+      configuration => $configuration['templeton-site']
     }
   }
 

+ 37 - 37
ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_calc_xmn_from_xms.rb

@@ -1,37 +1,37 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-module Puppet::Parser::Functions
-  newfunction(:hdp_calc_xmn_from_xms, :type => :rvalue) do |args|
-    heapsize_orig_str = args[0].to_s
-    xmn_percent = args[1].to_f
-    xmn_max = args[2].to_i
-    heapsize_str = heapsize_orig_str.gsub(/\D/,"")
-    heapsize = heapsize_str.to_i
-    heapsize_unit = heapsize_orig_str.gsub(/\d/,"")
-    xmn_val = heapsize*xmn_percent
-    xmn_val = xmn_val.floor.to_i
-    xmn_val = xmn_val/8
-    xmn_val = xmn_val*8
-    xmn_val = xmn_val > xmn_max ? xmn_max : xmn_val
-    xmn_val_str = "" + xmn_val.to_s + heapsize_unit
-    xmn_val_str
-  end
-end
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+module Puppet::Parser::Functions
+  newfunction(:hdp_calc_xmn_from_xms, :type => :rvalue) do |args|
+    heapsize_orig_str = args[0].to_s
+    xmn_percent = args[1].to_f
+    xmn_max = args[2].to_i
+    heapsize_str = heapsize_orig_str.gsub(/\D/,"")
+    heapsize = heapsize_str.to_i
+    heapsize_unit = heapsize_orig_str.gsub(/\d/,"")
+    xmn_val = heapsize*xmn_percent
+    xmn_val = xmn_val.floor.to_i
+    xmn_val = xmn_val/8
+    xmn_val = xmn_val*8
+    xmn_val = xmn_val > xmn_max ? xmn_max : xmn_val
+    xmn_val_str = "" + xmn_val.to_s + heapsize_unit
+    xmn_val_str
+  end
+end

+ 5 - 5
ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp

@@ -136,15 +136,15 @@ class hdp::params()
   $host_address = undef 
 
   ##### java 
-  $java32_home = hdp_default("java32_home","/usr/jdk32/jdk1.6.0_26")
-  $java64_home = hdp_default("java64_home","/usr/jdk64/jdk1.6.0_26")
+  $java32_home = hdp_default("java32_home","/usr/jdk32/jdk1.6.0_31")
+  $java64_home = hdp_default("java64_home","/usr/jdk64/jdk1.6.0_31")
   
   $wipeoff_data =  hdp_default("wipeoff_data",false) 
 
-  $jdk_location = hdp_default("jdk_location","http://download.oracle.com/otn-pub/java/jdk/6u26-b03")
+  $jdk_location = hdp_default("jdk_location","http://download.oracle.com/otn-pub/java/jdk/6u31-b03")
   $jdk_bins = hdp_default("jdk_bins",{
-    32 => "jdk-6u26-linux-i586.bin",
-    64 => "jdk-6u26-linux-x64.bin"
+    32 => "jdk-6u31-linux-i586.bin",
+    64 => "jdk-6u31-linux-x64.bin"
   })
 
   $jce_policy_zip = "jce_policy-6.zip"

+ 1 - 0
ambari-agent/src/main/python/ambari_agent/ActionQueue.py

@@ -126,6 +126,7 @@ class ActionQueue(threading.Thread):
     # assume some puppet pluing to run these commands
     roleResult = {'role' : command['role'],
                   'actionId' : commandId,
+                  'taskId' : command['taskId'],
                   'stdout' : commandresult['stdout'],
                   'clusterName' : clusterName,
                   'stderr' : commandresult['stderr'],

+ 7 - 2
ambari-agent/src/main/python/ambari_agent/AmbariConfig.py

@@ -27,12 +27,17 @@ config = ConfigParser.RawConfigParser()
 content = """
 
 [server]
-url=https://localhost:4080
-secured_url=https://localhost:8443
+hostname=localhost
+url_port=4080
+secured_url_port=8443
 
 [agent]
 prefix=/tmp/ambari-agent
 
+[services]
+serviceToPidMapFile=servicesToPidNames.dict
+pidLookupPath=/var/run/
+
 [stack]
 installprefix=/var/ambari/
 

+ 28 - 12
ambari-agent/src/main/python/ambari_agent/Controller.py

@@ -38,6 +38,7 @@ from ActionQueue import ActionQueue
 from optparse import OptionParser
 from wsgiref.simple_server import ServerHandler
 import security
+from NetUtil import NetUtil
 
 logger = logging.getLogger()
 
@@ -52,10 +53,10 @@ class Controller(threading.Thread):
     self.credential = None
     self.config = config
     self.hostname = socket.gethostname()
-    self.registerUrl = config.get('server', 'secured_url') + \
-      '/agent/register/' + self.hostname
-    self.heartbeatUrl = config.get('server', 'secured_url') + \
-       '/agent/heartbeat/' + self.hostname
+    server_secured_url = 'https://' + config.get('server', 'hostname') + ':' + config.get('server', 'secured_url_port')
+    self.registerUrl = server_secured_url + '/agent/register/' + self.hostname
+    self.heartbeatUrl = server_secured_url + '/agent/heartbeat/' + self.hostname
+    self.netutil = NetUtil()
      
   def start(self):
     self.actionQueue = ActionQueue(self.config)
@@ -88,9 +89,10 @@ class Controller(threading.Thread):
         registered = True
         pass
       except Exception, err:
+        delay = self.netutil.CONNECT_SERVER_RETRY_INTERVAL_SEC
         logger.info("Unable to connect to: " + self.registerUrl, exc_info = True)
-        """ sleep for 30 seconds and then retry again """
-        time.sleep(30)
+        """ Sleeping for {0} seconds and then retrying again """.format(delay)
+        time.sleep(delay)
         pass
       pass  
     return ret
@@ -111,18 +113,30 @@ class Controller(threading.Thread):
         pass
       pass
     pass
-  
+
+  # For testing purposes
+  DEBUG_HEARTBEAT_RETRIES = 0
+  DEBUG_SUCCESSFULL_HEARTBEATS = 0
+  DEBUG_STOP_HEARTBITTING = False
+
   def heartbeatWithServer(self):
+    self.DEBUG_HEARTBEAT_RETRIES = 0
+    self.DEBUG_SUCCESSFULL_HEARTBEATS = 0
     retry = False
     #TODO make sure the response id is monotonically increasing
     id = 0
     while True:
       try:
-        if retry==False:
+        if self.DEBUG_STOP_HEARTBITTING:
+          return
+
+        if not retry:
           data = json.dumps(self.heartbeat.build(id))
           pass
-        logger.info("Sending HeartBeat " + pprint.pformat(data))
-        req = urllib2.Request(self.heartbeatUrl, data, {'Content-Type': 
+        else:
+          self.DEBUG_HEARTBEAT_RETRIES += 1
+
+        req = urllib2.Request(self.heartbeatUrl, data, {'Content-Type':
                                                         'application/json'})
         f = security.secured_url_open(req)
         response = f.read()
@@ -137,6 +151,8 @@ class Controller(threading.Thread):
           logger.info("No commands sent from the Server.")
           pass
         retry=False
+        self.DEBUG_SUCCESSFULL_HEARTBEATS += 1
+        self.DEBUG_HEARTBEAT_RETRIES = 0
       except Exception, err:
         retry=True
         if "code" in err:
@@ -145,9 +161,9 @@ class Controller(threading.Thread):
           logger.error("Unable to connect to: "+ 
                        self.heartbeatUrl,exc_info=True)
       if self.actionQueue.isIdle():
-        time.sleep(3)
+        time.sleep(self.netutil.HEARTBEAT_IDDLE_INTERVAL_SEC)
       else:
-        time.sleep(1) 
+        time.sleep(self.netutil.HEARTBEAT_NOT_IDDLE_INTERVAL_SEC)
     pass
 
   def run(self):

+ 37 - 3
ambari-agent/src/main/python/ambari_agent/Heartbeat.py

@@ -19,11 +19,30 @@ limitations under the License.
 '''
 
 import json
+import logging
 from Hardware import Hardware
 from ActionQueue import ActionQueue
 from ServerStatus import ServerStatus
+from StatusCheck import StatusCheck
+import AmbariConfig
 import socket
 import time
+import traceback
+from pprint import pprint, pformat
+
+logger = logging.getLogger()
+
+COMPONENTS = [
+               {"serviceName" : "HDFS",
+                "componentName" : "DATANODE"},
+               {"serviceName" : "HDFS",
+                "componentName" : "NAMENODE"},
+               {"serviceName" : "HDFS",
+                "componentName" : "SECONDARYNAMENODE"}
+]
+
+LIVE_STATUS = "LIVE"
+DEAD_STATUS = "DEAD"
 
 firstContact = True
 class Heartbeat:
@@ -31,7 +50,6 @@ class Heartbeat:
   def __init__(self, actionQueue):
     self.actionQueue = actionQueue
     self.reports = []
-    self.componentStatus = []
 
   def build(self, id='-1'):
     global clusterId, clusterDefinitionRevision, firstContact
@@ -39,6 +57,22 @@ class Heartbeat:
     timestamp = int(time.time()*1000)
     queueResult = self.actionQueue.result()
     installedRoleStates = serverStatus.build()
+    pidLookupPath = AmbariConfig.config.get('services','pidLookupPath')
+    serviceToPidMapFile = AmbariConfig.config.get('services','serviceToPidMapFile')
+    statusCheck = StatusCheck(pidLookupPath, serviceToPidMapFile)
+    servicesStatusesDict = {}
+    componentStatus = []
+    for component in COMPONENTS:
+      serviceStatus = statusCheck.getStatus(component["componentName"])
+      if serviceStatus == None:
+        logger.warn("There is no service to pid mapping for " + component["componentName"])
+      status = LIVE_STATUS if serviceStatus else DEAD_STATUS 
+      componentStatus.append({"componentName" : component["componentName"],
+                                   "msg" : "",
+                                   "status" : status,
+                                   "serviceName" : component["serviceName"],
+                                   "clusterName" : ""})
+     
     
     nodeStatus = { "status" : "HEALTHY",
                    "cause" : "NONE"}
@@ -46,13 +80,13 @@ class Heartbeat:
     heartbeat = { 'responseId'        : int(id),
                   'timestamp'         : timestamp,
                   'hostname'          : socket.gethostname(),
-                 # 'componentStatus'   : self.componentStatus,
+                  'componentStatus'   : componentStatus,
                   'nodeStatus'        : nodeStatus
                 }
     if len(queueResult) != 0:
       heartbeat['reports'] = queueResult
       pass
-    
+    logger.info("Status for node heartbeat: " + pformat(nodeStatus))
     return heartbeat
 
 def main(argv=None):

+ 50 - 0
ambari-agent/src/main/python/ambari_agent/NetUtil.py

@@ -0,0 +1,50 @@
+from httplib import HTTP
+from urlparse import urlparse
+import time
+
+class NetUtil:
+
+  CONNECT_SERVER_RETRY_INTERVAL_SEC = 10
+  HEARTBEAT_IDDLE_INTERVAL_SEC = 3
+  HEARTBEAT_NOT_IDDLE_INTERVAL_SEC = 1
+
+  # Url within server to request during status check. This url
+  # should return HTTP code 200
+  SERVER_STATUS_REQUEST = "{0}/api/check"
+
+  # For testing purposes
+  DEBUG_STOP_RETRIES_FLAG = False
+
+  def checkURL(self, url):
+    """Try to connect to a given url. Result is True if the url returns HTTP code 200; in any
+    other case (like an unreachable server or a wrong HTTP code) the result will be False.
+    """
+    try:
+      p = urlparse(url)
+      h = HTTP(p[1])
+      h.putrequest('HEAD', p[2])
+      h.endheaders()
+      if h.getreply()[0] == 200: return True
+      else: return False
+    except Exception, e:
+      return False
+
+  def try_to_connect(self, server_url, max_retries, logger = None):
+    """Try to connect to a given url, sleeping for CONNECT_SERVER_RETRY_INTERVAL_SEC seconds
+    between retries. No more than max_retries attempts are performed. If max_retries is -1,
+    connection attempts are repeated until the server becomes reachable.
+    Returns the number of retries performed.
+    """
+    retries = 0
+    while (max_retries == -1 or retries < max_retries) and not self.DEBUG_STOP_RETRIES_FLAG:
+      server_is_up = self.checkURL(self.SERVER_STATUS_REQUEST.format(server_url))
+      if server_is_up:
+        break
+      else:
+        if logger is not None:
+          logger.info('Server at {0} is not reachable, sleeping for {1} seconds...'.format(server_url,
+            self.CONNECT_SERVER_RETRY_INTERVAL_SEC))
+        retries += 1
+        time.sleep(self.CONNECT_SERVER_RETRY_INTERVAL_SEC)
+    return retries
+

+ 18 - 17
ambari-agent/src/main/python/ambari_agent/StatusCheck.py

@@ -28,15 +28,17 @@ logger = logging.getLogger()
 
 
 
-def get_pair(line):
-  key, sep, value = line.strip().partition("=")
-  return key, value
+
 
 class StatusCheck:
 
+  def get_pair(self, line):
+    key, sep, value = line.strip().partition("=")
+    return key, value
+
   def listFiles(self, dir):
     basedir = dir
-    logger.info("Files in ", os.path.abspath(dir), ": ")
+    logger.debug("Files in " + os.path.abspath(dir) + ": ")
     subdirlist = []
     try:
       if os.path.isdir(dir):
@@ -53,28 +55,33 @@ class StatusCheck:
     except OSError as e:
       logger.info(e.strerror + ' to ' + e.filename)
 
-  def __init__(self, path):
+  def __init__(self, path, mappingFilePath):
+    if not os.path.isdir(path):
+      raise ValueError("Path argument must be valid directory")
+
+    if not os.path.exists(mappingFilePath):
+      raise IOError("File with services to pid mapping doesn't exists")
     self.path = path
+    self.mappingFilePath = mappingFilePath
     self.sh = shellRunner()
     self.pidFilesDict = {}
     self.listFiles(self.path)
 
 
-    with open("servicesToPidNames.dict") as fd:    
-      self.serToPidDict = dict(get_pair(line) for line in fd)
+    with open(self.mappingFilePath) as fd:    
+      self.serToPidDict = dict(self.get_pair(line) for line in fd)
 
   def getIsLive(self, pidPath):
     isLive = False
     pidFile = open(pidPath, 'r')
     pid = int(pidFile.readline())
     res = self.sh.run(['ps -p', str(pid), '-f'])
-    lines = res['output'].split('\n')
+    lines = res['output'].strip().split(os.linesep)
     try:
       procInfo = lines[1]
       isLive = not procInfo == None
     except IndexError:
       logger.info('Process is dead')
-
     return isLive
 
   def getStatus(self, serviceCode):
@@ -93,12 +100,6 @@ class StatusCheck:
       logger.info('Pid file was not found')
       return False
 
-#Temporary, for testing from console
-def main(argv=None):
-  statusCheck = StatusCheck('/var/')
-  isLive = statusCheck.getStatus(argv[1])
-  print isLive
-
-if __name__ == '__main__':
-  main(sys.argv)
+  def getSerToPidDict(self):
+    return self.serToPidDict
 

+ 14 - 5
ambari-agent/src/main/python/ambari_agent/main.py

@@ -32,6 +32,7 @@ from shell import getTempFiles
 from shell import killstaleprocesses 
 import AmbariConfig
 from security import CertificateManager
+from NetUtil import NetUtil
 
 logger = logging.getLogger()
 agentPid = os.getpid()
@@ -79,6 +80,9 @@ def debug(sig, frame):
     message += ''.join(traceback.format_stack(frame))
     logger.info(message)
 
+
+
+
 def main():
   global config
   default_cfg = { 'agent' : { 'prefix' : '/home/ambari' } }
@@ -111,8 +115,7 @@ def main():
     #retCode = createDaemon()
     pid = str(os.getpid())
     file(pidfile, 'w').write(pid)
-
-
+    
   logger.setLevel(logging.INFO)
   formatter = logging.Formatter("%(asctime)s %(filename)s:%(lineno)d - %(message)s")
   rotateLog = logging.handlers.RotatingFileHandler(logfile, "a", 10000000, 10)
@@ -123,7 +126,7 @@ def main():
   # Check for ambari configuration file.
   try:
     config = AmbariConfig.config
-    if(os.path.exists('/etc/ambari/ambari.ini')):
+    if os.path.exists('/etc/ambari/ambari.ini'):
       config.read('/etc/ambari/ambari.ini')
       AmbariConfig.setConfig(config)
     else:
@@ -132,8 +135,13 @@ def main():
     logger.warn(err)
 
   killstaleprocesses()
-  logger.info("Connecting to Server at: "+config.get('server', 'url'))
 
+  server_url = 'https://' + config.get('server', 'hostname') + ':' + config.get('server', 'url_port')
+  logger.info('Connecting to Server at: ' + server_url)
+
+  # Wait until server is reachable
+  netutil = NetUtil()
+  netutil.try_to_connect(server_url, -1, logger)
 
   #Initiate security
   """ Check if security is enable if not then disable it"""
@@ -142,8 +150,9 @@ def main():
   certMan.initSecurity()
   
   # Launch Controller communication
-  controller = Controller(config) 
+  controller = Controller(config)
   controller.start()
+  # TODO: is run() call necessary?
   controller.run()
   logger.info("finished")
     

+ 267 - 236
ambari-agent/src/main/python/ambari_agent/manifestGenerator.py

@@ -1,236 +1,267 @@
-#!/usr/bin/env python2.6
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import json
-import os.path
-import logging
-
-logger = logging.getLogger()
-
-xml_configurations_keys= ["hdfs_site", "hdfs_site", "core_site", 
-                          "mapred_queue_acls",
-                             "hadoop_policy", "mapred_site", 
-                             "capacity_scheduler", "hbase_site",
-                             "hbase_policy", "hive_site", "oozie_site", 
-                             "templeton_site"]
-
-#read static imports from file and write them to manifest
-def writeImports(outputFile, modulesdir, inputFileName='imports.txt'):
-  inputFile = open(inputFileName, 'r')
-  logger.info("Modules dir is " + modulesdir)
-  for line in inputFile:
-    modulename = line.rstrip()
-    line = "import '" + modulesdir + os.sep + modulename + "'" + os.linesep
-    outputFile.write(line)
-    
-  inputFile.close()
-
-def generateManifest(parsedJson, fileName, modulesdir):
-#reading json
-  hostname = parsedJson['hostname']
-  clusterHostInfo = parsedJson['clusterHostInfo']
-  params = parsedJson['hostLevelParams']
-  configurations = parsedJson['configurations']
-  xmlConfigurationsKeys = xml_configurations_keys
-  #hostAttributes = parsedJson['hostAttributes']
-  roles = [{'role' : parsedJson['role'],
-            'cmd' : parsedJson['roleCommand'],
-            'roleParams' : parsedJson['roleParams']}]
-  #writing manifest
-  manifest = open(fileName, 'w')
-
-  #writing imports from external static file
-  writeImports(outputFile=manifest, modulesdir=modulesdir)
-  
-  #writing nodes
-  writeNodes(manifest, clusterHostInfo)
-  
-  #writing params from map
-  writeParams(manifest, params)
-  
-  
-  xmlConfigurations = {}
-  flatConfigurations = {}
-
-  for configKey in configurations.iterkeys():
-    if configKey in xmlConfigurationsKeys:
-      xmlConfigurations[configKey] = configurations[configKey]
-    else:
-      flatConfigurations[configKey] = configurations[configKey]
-      
-  #writing config maps
-  writeXmlConfigurations(manifest, xmlConfigurations)
-  writeFlatConfigurations(manifest, flatConfigurations)
-
-  #writing host attributes
-  #writeHostAttributes(manifest, hostAttributes)
-
-  #writing task definitions 
-  writeTasks(manifest, roles)
-     
-  manifest.close()
-    
-  
-  #read dictionary
-def readDict(file, separator='='):
-  result = dict()
-  
-  for line in file :
-    dictTuple = line.partition(separator)
-    result[dictTuple[0].strip()] = dictTuple[2].strip()
-  
-  return result
-  
-
-  #write nodes
-def writeNodes(outputFile, clusterHostInfo):
-  for node in clusterHostInfo.iterkeys():
-    outputFile.write('$' + node + '= [')
-    coma = ''
-    
-    for value in clusterHostInfo[node]:
-      outputFile.write(coma + '\'' + value + '\'')
-      coma = ', '
-
-    outputFile.write(']\n')
-
-#write params
-def writeParams(outputFile, params):
-
-  for paramName in params.iterkeys():
-
-    param = params[paramName]
-    if type(param) is dict:
-
-      outputFile.write('$' + paramName + '= {\n')
-
-      coma = ''
-
-      for subParam in param.iterkeys():
-        outputFile.write(coma + '"' + subParam + '" => "' + param[subParam] + '"')
-        coma = ',\n'
-
-      outputFile.write('\n}\n')
-    else:
-      outputFile.write('$' +  paramName + '="' + param + '"\n')
-    
-
-#write host attributes
-def writeHostAttributes(outputFile, hostAttributes):
-  outputFile.write('$hostAttributes={\n')
-
-  coma = ''
-  for attribute in hostAttributes.iterkeys():
-    outputFile.write(coma + '"' +  attribute + '" => "{' + hostAttributes[attribute] + '"}')
-    coma = ',\n'
-
-  outputFile.write('}\n')
-
-#write flat configurations
-def writeFlatConfigurations(outputFile, flatConfigs):
-  for flatConfigName in flatConfigs.iterkeys():
-    for flatConfig in flatConfigs[flatConfigName].iterkeys():
-      outputFile.write('$' + flatConfig + ' = "' + flatConfigs[flatConfigName][flatConfig] + '"' + os.linesep)
-
-#write xml configurations
-def writeXmlConfigurations(outputFile, xmlConfigs):
-  outputFile.write('$configuration =  {\n')
-
-  for configName in xmlConfigs.iterkeys():
-
-    config = xmlConfigs[configName]
-    
-    outputFile.write(configName + '=> {\n')
-    coma = ''
-    for configParam in config.iterkeys():
-      outputFile.write(coma + '"' + configParam + '" => "' + config[configParam] + '"')
-      coma = ',\n'
-
-    outputFile.write('\n},\n')
-    
-  outputFile.write('\n}\n')
-
-#write node tasks
-def writeTasks(outputFile, roles):
-  #reading dictionaries
-  rolesToClassFile = open('rolesToClass.dict', 'r')
-  rolesToClass = readDict(rolesToClassFile)
-  rolesToClassFile.close()
-
-  serviceStatesFile =  open('serviceStates.dict', 'r')
-  serviceStates = readDict(serviceStatesFile)
-  serviceStatesFile.close()
-
-  outputFile.write('node /default/ {\n ')
-  writeStages(outputFile, len(roles))
-  stageNum = 1
-
-  for role in roles :
-    rolename = role['role']
-    command = role['cmd']
-    taskParams = role['roleParams']
-    taskParamsNormalized = normalizeTaskParams(taskParams)
-    taskParamsPostfix = ''
-    
-    if len(taskParamsNormalized) > 0 :
-      taskParamsPostfix = ', ' + taskParamsNormalized
-    
-    className = rolesToClass[rolename]
-    serviceState = serviceStates[command]
-    
-    outputFile.write('class {\'' + className + '\':' + ' stage => ' + str(stageNum) + 
-                     ', service_state => ' + serviceState + taskParamsPostfix + '}\n')
-    stageNum = stageNum + 1
-  outputFile.write('}\n')
-def normalizeTaskParams(taskParams):
-  result = ''
-  coma = ''
-  
-  for paramName in taskParams.iterkeys():
-    result = coma + result + paramName + ' => ' + taskParams[paramName]
-    coma = ','
-    
-  return result
-  
-def writeStages(outputFile, numStages):
-  arrow = ''
-  
-  for i in range(numStages):
-    outputFile.write(arrow + 'stage{' + str(i + 1) + ' :}')
-    arrow = ' -> '
-  
-  outputFile.write('\n')
-    
-
-def main():
-  logging.basicConfig(level=logging.DEBUG)    
-  #test code
-  jsonFile = open('test.json', 'r')
-  jsonStr = jsonFile.read() 
-  modulesdir = os.path.abspath(os.getcwd() + ".." + os.sep + ".." + 
-                               os.sep + ".." + os.sep + "puppet" + 
-                               os.sep + "modules" + os.sep)
-  inputJsonStr = jsonStr
-  parsedJson = json.loads(inputJsonStr)
-  generateManifest(parsedJson, 'site.pp', modulesdir)
-
-if __name__ == '__main__':
-  main()
-
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import json
+import os.path
+import logging
+from uuid import getnode as get_mac
+
+logger = logging.getLogger()
+
+xml_configurations_keys= ["hdfs-site", "core-site", 
+                          "mapred-queue-acls",
+                             "hadoop-policy", "mapred-site", 
+                             "capacity-scheduler", "hbase-site",
+                             "hbase-policy", "hive-site", "oozie-site", 
+                             "templeton-site"]
+
+#read static imports from file and write them to manifest
+def writeImports(outputFile, modulesdir, inputFileName='imports.txt'):
+  inputFile = open(inputFileName, 'r')
+  logger.info("Modules dir is " + modulesdir)
+  for line in inputFile:
+    modulename = line.rstrip()
+    line = "import '" + modulesdir + os.sep + modulename + "'" + os.linesep
+    outputFile.write(line)
+    
+  inputFile.close()
+
+def generateManifest(parsedJson, fileName, modulesdir):
+  logger.info("JSON Received:")
+  logger.info(json.dumps(parsedJson, sort_keys=True, indent=4))
+#reading json
+  hostname = parsedJson['hostname']
+  clusterHostInfo = {} 
+  if 'clusterHostInfo' in parsedJson:
+    if parsedJson['clusterHostInfo']:
+      clusterHostInfo = parsedJson['clusterHostInfo']
+  params = {}
+  if 'hostLevelParams' in parsedJson: 
+    if parsedJson['hostLevelParams']:
+      params = parsedJson['hostLevelParams']
+  configurations = {}
+  if 'configurations' in parsedJson:
+    if parsedJson['configurations']:
+      configurations = parsedJson['configurations']
+  xmlConfigurationsKeys = xml_configurations_keys
+  #hostAttributes = parsedJson['hostAttributes']
+  roleParams = {}
+  if 'roleParams' in parsedJson:
+    if parsedJson['roleParams']:
+      roleParams = parsedJson['roleParams']
+  roles = [{'role' : parsedJson['role'],
+            'cmd' : parsedJson['roleCommand'],
+            'roleParams' : roleParams}]
+  #writing manifest
+  manifest = open(fileName, 'w')
+
+  #writing imports from external static file
+  writeImports(outputFile=manifest, modulesdir=modulesdir)
+  
+  #writing nodes
+  writeNodes(manifest, clusterHostInfo)
+  
+  #writing params from map
+  writeParams(manifest, params)
+  
+  
+  xmlConfigurations = {}
+  flatConfigurations = {}
+
+  if configurations: 
+    for configKey in configurations.iterkeys():
+      if configKey in xmlConfigurationsKeys:
+        xmlConfigurations[configKey] = configurations[configKey]
+      else:
+        flatConfigurations[configKey] = configurations[configKey]
+      
+  #writing config maps
+  if (xmlConfigurations):
+    writeXmlConfigurations(manifest, xmlConfigurations)
+  if (flatConfigurations):
+    writeFlatConfigurations(manifest, flatConfigurations)
+
+  #writing host attributes
+  #writeHostAttributes(manifest, hostAttributes)
+
+  #writing task definitions 
+  writeTasks(manifest, roles)
+     
+  manifest.close()
+    
+  
+  #read dictionary
+def readDict(file, separator='='):
+  result = dict()
+  
+  for line in file :
+    dictTuple = line.partition(separator)
+    result[dictTuple[0].strip()] = dictTuple[2].strip()
+  
+  return result
+  
+
+  #write nodes
+def writeNodes(outputFile, clusterHostInfo):
+  for node in clusterHostInfo.iterkeys():
+    outputFile.write('$' + node + '= [')
+    coma = ''
+    
+    for value in clusterHostInfo[node]:
+      outputFile.write(coma + '\'' + value + '\'')
+      coma = ', '
+
+    outputFile.write(']\n')
+
+#write params
+def writeParams(outputFile, params):
+
+  for paramName in params.iterkeys():
+    # todo handle repo information properly
+    if paramName == 'repo_info':
+      continue
+
+    param = params[paramName]
+    if type(param) is dict:
+
+      outputFile.write('$' + paramName + '= {\n')
+
+      coma = ''
+
+      for subParam in param.iterkeys():
+        outputFile.write(coma + '"' + subParam + '" => "' + param[subParam] + '"')
+        coma = ',\n'
+
+      outputFile.write('\n}\n')
+    else:
+      outputFile.write('$' +  paramName + '="' + param + '"\n')
+    
+
+#write host attributes
+def writeHostAttributes(outputFile, hostAttributes):
+  outputFile.write('$hostAttributes={\n')
+
+  coma = ''
+  for attribute in hostAttributes.iterkeys():
+    outputFile.write(coma + '"' +  attribute + '" => "{' + hostAttributes[attribute] + '"}')
+    coma = ',\n'
+
+  outputFile.write('}\n')
+
+#write flat configurations
+def writeFlatConfigurations(outputFile, flatConfigs):
+  for flatConfigName in flatConfigs.iterkeys():
+    for flatConfig in flatConfigs[flatConfigName].iterkeys():
+      outputFile.write('$' + flatConfig + ' = "' + flatConfigs[flatConfigName][flatConfig] + '"' + os.linesep)
+
+#write xml configurations
+def writeXmlConfigurations(outputFile, xmlConfigs):
+  outputFile.write('$configuration =  {\n')
+
+  for configName in xmlConfigs.iterkeys():
+
+    config = xmlConfigs[configName]
+    
+    outputFile.write(configName + '=> {\n')
+    coma = ''
+    for configParam in config.iterkeys():
+      outputFile.write(coma + '"' + configParam + '" => "' + config[configParam] + '"')
+      coma = ',\n'
+
+    outputFile.write('\n},\n')
+    
+  outputFile.write('\n}\n')
+
+#write node tasks
+def writeTasks(outputFile, roles):
+  #reading dictionaries
+  rolesToClassFile = open('rolesToClass.dict', 'r')
+  rolesToClass = readDict(rolesToClassFile)
+  rolesToClassFile.close()
+
+  serviceStatesFile =  open('serviceStates.dict', 'r')
+  serviceStates = readDict(serviceStatesFile)
+  serviceStatesFile.close()
+
+  outputFile.write('node /default/ {\n ')
+  writeStages(outputFile, len(roles) + 1)
+  stageNum = 1
+  outputFile.write('class {\'hdp\': stage => ' + str(stageNum) + '}\n')
+  stageNum = stageNum + 1
+
+  for role in roles :
+    rolename = role['role']
+    command = role['cmd']
+    taskParams = role['roleParams']
+    if (rolename == 'ZOOKEEPER_SERVER'):
+      taskParams['myid'] = str(get_mac())
+    taskParamsNormalized = normalizeTaskParams(taskParams)
+    taskParamsPostfix = ''
+    
+    if len(taskParamsNormalized) > 0 :
+      taskParamsPostfix = ', ' + taskParamsNormalized
+    
+    className = rolesToClass[rolename]
+   
+    if command in serviceStates:
+      serviceState = serviceStates[command] 
+      outputFile.write('class {\'' + className + '\':' + ' stage => ' + str(stageNum) + 
+                     ', service_state => ' + serviceState + taskParamsPostfix + '}\n')
+    else:
+      outputFile.write('class {\'' + className + '\':' + ' stage => ' + str(stageNum) + 
+                     taskParamsPostfix + '}\n')
+
+    stageNum = stageNum + 1
+  outputFile.write('}\n')
+def normalizeTaskParams(taskParams):
+  result = ''
+  coma = ''
+  
+  for paramName in taskParams.iterkeys():
+    result = coma + result + paramName + ' => ' + taskParams[paramName]
+    coma = ','
+    
+  return result
+  
+def writeStages(outputFile, numStages):
+  arrow = ''
+  
+  for i in range(numStages):
+    outputFile.write(arrow + 'stage{' + str(i + 1) + ' :}')
+    arrow = ' -> '
+  
+  outputFile.write('\n')
+    
+
+def main():
+  logging.basicConfig(level=logging.DEBUG)    
+  #test code
+  jsonFile = open('test.json', 'r')
+  jsonStr = jsonFile.read() 
+  modulesdir = os.path.abspath(os.getcwd() + ".." + os.sep + ".." + 
+                               os.sep + ".." + os.sep + "puppet" + 
+                               os.sep + "modules" + os.sep)
+  inputJsonStr = jsonStr
+  parsedJson = json.loads(inputJsonStr)
+  generateManifest(parsedJson, 'site.pp', modulesdir)
+
+if __name__ == '__main__':
+  main()
+

+ 10 - 6
ambari-agent/src/main/python/ambari_agent/puppetExecutor.py

@@ -82,15 +82,19 @@ class puppetExecutor:
                                   env=puppetEnv)
     stderr_out = puppet.communicate()
     error = "none"
-    if puppet.returncode != 0:
+    returncode = 0
+    if (puppet.returncode != 0 and puppet.returncode != 2) :
+      returncode = puppet.returncode
       error = stderr_out[1]
-      result["stderr"] = error
-      logging.error("Error running puppet: " + stderr_out[1])
+      logging.error("Error running puppet: \n" + stderr_out[1])
       pass
+    result["stderr"] = error
     puppetOutput = stderr_out[0]
-    result["exitcode"] = puppet.returncode
-    result["stdout"] = puppetOutput
-    logger.info("ExitCode : \n"  + str(result["exitcode"]))
+    logger.info("Output from puppet :\n" + puppetOutput)
+    result["exitcode"] = returncode
+    
+    result["stdout"] = "Output"
+    logger.info("ExitCode : "  + str(result["exitcode"]))
     return result
  
 def main():

+ 17 - 5
ambari-agent/src/main/python/ambari_agent/rolesToClass.dict

@@ -1,6 +1,6 @@
 NAMENODE = hdp-hadoop::namenode
 DATANODE = hdp-hadoop::datanode
-SNAMENODE = hdp-hadoop::snamenode
+SECONDARY_NAMENODE = hdp-hadoop::snamenode
 JOBTRACKER = hdp-hadoop::jobtracker
 TASKTRACKER = hdp-hadoop::tasktracker
 HDFS_CLIENT = hdp-hadoop::client
@@ -10,8 +10,8 @@ ZOOKEEPER_CLIENT = hdp-zookeeper::client
 HBASE_MASTER = hdp-hbase::master
 HBASE_REGIONSERVER = hdp-hbase::regionserver
 HBASE_CLIENT = hdp-hbase::client
-PIG_CLIENT = hdp-pig
-SQOOP_CLIENT = hdp-sqoop
+PIG = hdp-pig
+SQOOP = hdp-sqoop
 OOZIE_SERVER = hdp-oozie::server
 OOZIE_CLIENT = hdp-oozie::client
 HIVE_CLIENT = hdp-hive::client
@@ -23,6 +23,18 @@ TEMPLETON_SERVER = hdp-templeton::server
 TEMPLETON_CLIENT = hdp-templeton::client
 DASHBOARD = hdp-dashboard
 NAGIOS_SERVER = hdp-nagios::server
-GANGLIA_MONITOR_SERVER = hdp-ganglia::server
+GANGLIA_SERVER = hdp-ganglia::server
 GANGLIA_MONITOR = hdp-ganglia::monitor
-HTTPD = hdp-monitor-webserver
+HTTPD = hdp-monitor-webserver
+HDFS_SERVICE_CHECK = hdp-hadoop::hdfs::service_check
+MAPREDUCE_SERVICE_CHECK = hdp-hadoop::mapred::service_check
+ZOOKEEPER_SERVICE_CHECK = hdp-zookeeper::zookeeper::service_check
+ZOOKEEPER_QUORUM_SERVICE_CHECK = hdp-zookeeper::quorum::service_check
+HBASE_SERVICE_CHECK = hdp-hbase::hbase::service_check
+HIVE_SERVICE_CHECK = hdp-hive::hive::service_check
+HCAT_SERVICE_CHECK = hdp-hcat::hcat::service_check
+OOZIE_SERVICE_CHECK = hdp-oozie::oozie::service_check
+PIG_SERVICE_CHECK = hdp-pig::pig::service_check
+SQOOP_SERVICE_CHECK = hdp-sqoop::sqoop::service_check
+TEMPLETON_SERVICE_CHECK = hdp-templeton::templeton::service_check
+DASHBOARD_SERVICE_CHECK = hdp-dashboard::dashboard::service_check

+ 3 - 2
ambari-agent/src/main/python/ambari_agent/security.py

@@ -55,6 +55,7 @@ class CertificateManager():
     self.config = config
     self.keysdir = self.config.get('security', 'keysdir')
     self.server_crt=self.config.get('security', 'server_crt')
+    self.server_url = 'https://' + self.config.get('server', 'hostname') + ':' + self.config.get('server', 'url_port')
     
   def getAgentKeyName(self):
     keysdir = self.config.get('security', 'keysdir')
@@ -98,7 +99,7 @@ class CertificateManager():
         logger.info("Agent certificate exists, ok")
             
   def loadSrvrCrt(self):
-    get_ca_url = self.config.get('server', 'url') + '/cert/ca/'
+    get_ca_url = self.server_url + '/cert/ca/'
     stream = urllib2.urlopen(get_ca_url)
     response = stream.read()
     stream.close()
@@ -106,7 +107,7 @@ class CertificateManager():
     srvr_crt_f.write(response)
       
   def reqSignCrt(self):
-    sign_crt_req_url = self.config.get('server', 'url') + '/certs/' + socket.gethostname()
+    sign_crt_req_url = self.server_url + '/certs/' + socket.gethostname()
     agent_crt_req_f = open(self.getAgentCrtReqName())
     agent_crt_req_content = agent_crt_req_f.read()
     passphrase_env_var = self.config.get('security', 'passphrase_env_var_name')

+ 2 - 1
ambari-agent/src/main/python/ambari_agent/serviceStates.dict

@@ -1,2 +1,3 @@
 START = running
-INSTALL = installed_and_configured
+INSTALL = installed_and_configured
+STOP = stopped

+ 11 - 11
ambari-agent/src/main/python/ambari_agent/test.json

@@ -19,26 +19,26 @@
 
 
 "configurations" : {
-"hdfs_site" : { "dfs.block.size" : "256000000", "dfs.replication" : "1" } ,
-"core_site" :  { "fs.default.name" : "hrt8n36.cc1.ygridcore.net" } ,
-"mapred_queue_acls" : {"mapred.queue.default.acl-submit-job" : "*",
+"hdfs-site" : { "dfs.block.size" : "256000000", "dfs.replication" : "1" } ,
+"core-site" :  { "fs.default.name" : "hrt8n36.cc1.ygridcore.net" } ,
+"mapred-queue-acls" : {"mapred.queue.default.acl-submit-job" : "*",
 		       "mapred.queue.default.acl-administer-jobs" : "*"},
-"hadoop_policy" : {"security.client.protocol.acl" : "*",
+"hadoop-policy" : {"security.client.protocol.acl" : "*",
 		   "security.client.datanode.protocol.acl" : "*"},
-"mapred_site" : {"mapred.jobtracker.taskScheduler" : "org.apache.hadoop.mapred.CapacityTaskScheduler",
+"mapred-site" : {"mapred.jobtracker.taskScheduler" : "org.apache.hadoop.mapred.CapacityTaskScheduler",
 		 "mapred.queue.names" : "hive,pig,default"},
-"capacity_scheduler" : {"mapred.capacity-scheduler.queue.default.capacity" : "100",
+"capacity-scheduler" : {"mapred.capacity-scheduler.queue.default.capacity" : "100",
 			"mapred.capacity-scheduler.queue.default.supports-priorit" : "false"},
 "health_check" : {"security_enabled" : "true",
                   "task_bin_exe" : "ls"},
 "hadoop_env" : {"hadoop_piddirprefix" : "/tmp"},
 
-"hbase_site" : {"hbase.cluster.distributed" : "true"},
-"hbase_policy" : {"security.client.protocol.acl" : "*"},
+"hbase-site" : {"hbase.cluster.distributed" : "true"},
+"hbase-policy" : {"security.client.protocol.acl" : "*"},
 "hadoop_metrics" : {"ganglia_server_host" : "localhost"},
-"hive_site" : {"hive.exec.scratchdir" : "/tmp"},
-"oozie_site" : {"oozie.service.ActionService.executor.ext.classes" : "org.apache.oozie.action.hadoop.HiveActionExecutor, org.apache.oozie.action.hadoop.SqoopActionExecutor,org.apache.oozie.action.email.EmailActionExecutor,"},
-"templeton_site" : {"templeton.override.enabled" : "true"}
+"hive-site" : {"hive.exec.scratchdir" : "/tmp"},
+"oozie-site" : {"oozie.service.ActionService.executor.ext.classes" : "org.apache.oozie.action.hadoop.HiveActionExecutor, org.apache.oozie.action.hadoop.SqoopActionExecutor,org.apache.oozie.action.email.EmailActionExecutor,"},
+"templeton-site" : {"templeton.override.enabled" : "true"}
 },
 
 "role": "NAMENODE",

+ 3 - 0
ambari-agent/src/test/python/TestCertGeneration.py

@@ -30,6 +30,9 @@ class TestCertGeneration(TestCase):
   def setUp(self):
     self.tmpdir = tempfile.mkdtemp()
     config = ConfigParser.RawConfigParser()
+    config.add_section('server')
+    config.set('server', 'hostname', 'example.com')
+    config.set('server', 'url_port', '777')
     config.add_section('security')
     config.set('security', 'keysdir', self.tmpdir)
     config.set('security', 'server_crt', 'ca.crt')

+ 115 - 0
ambari-agent/src/test/python/TestConnectionRetries.py

@@ -0,0 +1,115 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from ambari_agent.ServerStatus import ServerStatus
+from ambari_agent.NetUtil import NetUtil
+import ambari_agent.main
+from threading import Thread
+import time
+from ambari_agent.Heartbeat import Heartbeat
+from ambari_agent.ActionQueue import ActionQueue
+from ambari_agent import AmbariConfig
+import socket
+import os
+import logging
+from ambari_agent.Controller import Controller
+import logging
+
+NON_EXISTING_DOMAIN = 'non-existing-domain43342432.com'
+BAD_URL = 'http://www.iana.org/domains/ex222ample/'
+
+class TestConnectionRetries(TestCase):
+
+  logger = logging.getLogger()
+
+  def setUp(self):
+    self.logger.disabled = True
+
+
+  def test_url_checks(self):
+    netutil = NetUtil()
+    self.assertEquals(netutil.checkURL('http://www.iana.org/domains/example/'), True, "Good url - HTTP code 200")
+    self.assertEquals(netutil.checkURL('https://www.iana.org/domains/example/'), True, "Good HTTPS url - HTTP code 200")
+    self.assertEquals(netutil.checkURL('http://' + NON_EXISTING_DOMAIN), False, "Not existing domain")
+    self.assertEquals(netutil.checkURL(BAD_URL), False, "Bad url")
+    self.assertEquals(netutil.checkURL('http://192.168.253.177'), False, "Not reachable IP")
+
+  def test_registration_retries(self):
+    netutil = NetUtil()
+    netutil.CONNECT_SERVER_RETRY_INTERVAL_SEC=0.1
+    retries = netutil.try_to_connect(BAD_URL, 3)
+    self.assertEquals(retries, 3)
+
+  def test_infinit_registration_retries(self):
+    netutil = NetUtil()
+    netutil.CONNECT_SERVER_RETRY_INTERVAL_SEC=0.1
+    thread = Thread(target = netutil.try_to_connect, args = (BAD_URL, -1))
+    thread.start()
+    time.sleep(0.5)
+    # I have to stop the thread anyway, so I'll check results later
+    threadWasAlive = thread.isAlive()
+    netutil.DEBUG_STOP_RETRIES_FLAG = True
+    time.sleep(1)
+    # Checking results before thread stop
+    self.assertEquals(threadWasAlive, True, "Thread should still be retrying to connect")
+    # Checking results after thread stop
+    self.assertEquals(thread.isAlive(), False, "Thread should stop now")
+
+  def test_heartbeat_retries(self):
+    netutil = NetUtil()
+    netutil.HEARTBEAT_IDDLE_INTERVAL_SEC=0.1
+    netutil.HEARTBEAT_NOT_IDDLE_INTERVAL_SEC=0.1
+    #building heartbeat object
+    testsPath = os.path.dirname(os.path.realpath(__file__))
+    dictPath = testsPath + os.sep + '..' + os.sep + '..' + os.sep + 'main' + os.sep + 'python' + os.sep + 'ambari_agent' + os.sep + 'servicesToPidNames.dict'
+    AmbariConfig.config.set('services','serviceToPidMapFile', dictPath)
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig())
+    heartbeat = Heartbeat(actionQueue)
+    # testing controller with our heartbeat and wrong url
+    controller = Controller(AmbariConfig.config)
+    controller.heartbeat = heartbeat
+    controller.heartbeatUrl = BAD_URL
+    controller.actionQueue = actionQueue
+    controller.logger = self.logger
+    controller.netutil = netutil
+    thread = Thread(target =  controller.heartbeatWithServer)
+    thread.start()
+    time.sleep(1)
+
+    # I have to stop the thread anyway, so I'll check results later
+    threadWasAlive = thread.isAlive()
+    successfull_heartbits0 = controller.DEBUG_SUCCESSFULL_HEARTBEATS
+    heartbeat_retries0 = controller.DEBUG_HEARTBEAT_RETRIES
+    # Stopping thread
+    controller.DEBUG_STOP_HEARTBITTING = True
+    time.sleep(1)
+    # Checking results before thread stop
+    self.assertEquals(threadWasAlive, True, "Heartbeat should be alive now")
+    self.assertEquals(successfull_heartbits0, 0, "Heartbeat should not have any success")
+    self.assertGreater(heartbeat_retries0, 1, "Heartbeat should retry connecting")
+    # Checking results after thread stop
+    self.assertEquals(thread.isAlive(), False, "Heartbeat should stop now")
+    self.assertEquals(controller.DEBUG_SUCCESSFULL_HEARTBEATS, 0, "Heartbeat should not have any success")
+
+  def tearDown(self):
+    self.logger.disabled = False
+
+

+ 7 - 3
ambari-agent/src/test/python/TestHeartbeat.py

@@ -21,12 +21,16 @@ limitations under the License.
 from unittest import TestCase
 from ambari_agent.Heartbeat import Heartbeat
 from ambari_agent.ActionQueue import ActionQueue
-from ambari_agent.AmbariConfig import AmbariConfig
+from ambari_agent import AmbariConfig
 import socket
+import os
 
 class TestHeartbeat(TestCase):
   def test_build(self):
-    actionQueue = ActionQueue(AmbariConfig().getConfig())
+    testsPath = os.path.dirname(os.path.realpath(__file__))
+    dictPath = testsPath + os.sep + '..' + os.sep + '..' + os.sep + 'main' + os.sep + 'python' + os.sep + 'ambari_agent' + os.sep + 'servicesToPidNames.dict'
+    AmbariConfig.config.set('services','serviceToPidMapFile', dictPath)
+    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig())
     heartbeat = Heartbeat(actionQueue)
     result = heartbeat.build(100)
-  
+  

+ 95 - 0
ambari-agent/src/test/python/TestStatusCheck.py

@@ -0,0 +1,95 @@
+#!/usr/bin/env python2.6
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import tempfile
+import shutil
+import os
+from unittest import TestCase
+from ambari_agent.StatusCheck import StatusCheck
+import subprocess
+import signal
+from shell import shellRunner
+
+
+MAPPING_FILE_NAME='map.dict'
+
+COMPONENT_LIVE = 'LIVE_COMPONENT'
+COMPONENT_LIVE_PID = 'live_comp.pid'
+COMPONENT_LIVE_CMD='''
+while [ 1==1 ]
+do
+   echo ok
+done
+'''
+
+COMPONENT_DEAD = 'DEAD_COMPONENT'
+COMPONENT_DEAD_PID = 'dead_comp.pid'
+DEAD_PID=0
+
+
+class TestStatusCheck(TestCase):
+
+  def setUp(self):
+    self.tmpdir = tempfile.mkdtemp()
+    self.tmpdict = tempfile.NamedTemporaryFile(dir=self.tmpdir)
+    self.tmpdict = open(self.tmpdir + os.sep + MAPPING_FILE_NAME, 'w')
+
+    self.sh = shellRunner()
+    
+    #Launch eternal process
+    p = subprocess.Popen([COMPONENT_LIVE_CMD], stdout=subprocess.PIPE, 
+                         stderr=subprocess.PIPE, shell=True, close_fds=True)
+
+    #Write mapping for pid files for both live and dead process
+    self.tmpdict.write(COMPONENT_LIVE + '=' + COMPONENT_LIVE_PID + os.linesep)
+    self.tmpdict.write(COMPONENT_DEAD + '=' + COMPONENT_DEAD_PID + os.linesep)
+    self.tmpdict.close()
+
+    #Write pid of live process to file
+    live_pid_file = open(self.tmpdir + os.sep + COMPONENT_LIVE_PID, 'w')
+    self.live_pid = p.pid
+    live_pid_file.write(str(self.live_pid))
+    live_pid_file.close()
+
+    #Write pid of dead process to file
+    dead_pid_file = open(self.tmpdir + os.sep + COMPONENT_DEAD_PID, 'w')
+    dead_pid_file.write(str(DEAD_PID))
+    dead_pid_file.close()
+
+    #Init status checker
+    self.statusCheck = StatusCheck(self.tmpdir, self.tmpdict.name)
+
+  # Ensure that status checker throws exceptions on invalid params
+  def test_exceptions(self):
+    self.assertRaises(ValueError,StatusCheck,"tmp","tmp")
+    self.assertRaises(IOError, StatusCheck,self.tmpdir,"tmp")
+
+  # Ensure that status checker return True for running process
+  def test_live(self):
+    status = self.statusCheck.getStatus(COMPONENT_LIVE)
+    self.assertEqual(status, True)
+
+  # Ensure that status checker return False for dead process
+  def test_dead(self):
+    status = self.statusCheck.getStatus(COMPONENT_DEAD)
+    self.assertEqual(status, False)
+
+  def tearDown(self):
+    os.kill(self.live_pid, signal.SIGKILL)
+    shutil.rmtree(self.tmpdir)

+ 37 - 0
ambari-server/conf/unix/ambari-server

@@ -0,0 +1,37 @@
+# description: ambari-server daemon
+# processname: ambari-server
+
+# /etc/init.d/ambari-server
+
+export PATH=/usr/lib/ambari-server/*:$PATH
+export AMBARI_CONF_DIR=/etc/ambari-server/conf:$PATH
+
+case "$1" in
+  start)
+        echo -e "Starting ambari-server"
+        export AMBARI_PASSPHRASE=pass_phrase
+        python /usr/sbin/ambari-server.py start
+        ;;
+  stop)
+        echo -e "Stopping ambari-server"
+        python /usr/sbin/ambari-server.py stop
+        ;;
+  restart)
+        echo -e "Restarting ambari-server"
+        $0 stop
+        $0 start
+        ;;
+  setup)
+        echo -e "Run postgresql initdb"
+        service postgresql initdb
+        echo -e "Run postgresql start"
+        service postgresql start
+        echo -e "Setup ambari-server"
+        python /usr/sbin/ambari-server.py setup -d postgres -f /var/lib/ambari-server/resources/Ambari-DDL.sql
+        ;;
+  *)
+        echo "Usage: /usr/sbin/ambari-server {start|stop|restart|setup}"
+        exit 1
+esac
+
+exit 0

+ 3 - 0
ambari-server/conf/unix/ambari.properties

@@ -0,0 +1,3 @@
+security.server.keys_dir = /var/lib/ambari-server/keys
+resources.dir = /var/lib/ambari-server/resources
+jdk.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jdk-6u31-linux-x64.bin

+ 36 - 0
ambari-server/conf/unix/log4j.properties

@@ -0,0 +1,36 @@
+# Copyright 2011 The Apache Software Foundation
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+# Root logger option
+log4j.rootLogger=DEBUG, stdout, file
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+
+# Direct log messages to a log file
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=/var/log/ambari/ambari-server.log
+log4j.appender.file.MaxFileSize=1MB
+log4j.appender.file.MaxBackupIndex=1
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
+

+ 153 - 4
ambari-server/pom.xml

@@ -52,10 +52,143 @@
           </includes>
         </configuration>
       </plugin>
+
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>rpm-maven-plugin</artifactId>
+        <version>2.0.1</version>
+        <executions>
+          <execution>
+            <!-- unbinds rpm creation from maven lifecycle -->
+            <phase>none</phase>
+            <goals>
+              <goal>rpm</goal>
+            </goals>
+          </execution>
+
+        </executions>
+        <configuration>
+          <!-- places rpm to specified folder -->
+          <!--
+          <workarea>
+            rpm-target
+          </workarea>
+          -->
+          <copyright>2012, Apache Software Foundation</copyright>
+          <group>Development</group>
+          <description>Maven Recipe: RPM Package.</description>
+          <mappings>
+
+            <mapping>
+              <directory>/usr/lib/ambari-server</directory>
+              <dependency>
+              </dependency>
+            </mapping>
+
+            <mapping>
+              <directory>/usr/lib/ambari-server</directory>
+              <sources>
+                <source>
+                  <location>${project.build.directory}/${project.artifactId}-${project.version}.jar</location>
+                </source>
+              </sources>
+            </mapping>
+
+
+            <mapping>
+              <directory>/usr/sbin</directory>
+              <sources>
+                <source>
+                  <location>src/main/python/ambari-server.py</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/usr/sbin</directory>
+              <filemode>744</filemode>
+              <sources>
+                <source>
+                  <location>conf/unix/ambari-server</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/etc/ambari-server/conf</directory>
+              <configuration>true</configuration>
+              <sources>
+                <source>
+                  <location>conf/unix/ambari.properties</location>
+                </source>
+                <source>
+                  <location>conf/unix/log4j.properties</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/var/lib/ambari-server/keys</directory>
+              <sources>
+                <source>
+                  <location>src/main/resources/ca.config</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/var/lib/ambari-server/keys/db</directory>
+              <sources>
+                <source>
+                  <location>src/main/resources/db</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/var/log/ambari</directory>
+            </mapping>
+
+            <mapping>
+              <directory>/var/lib/ambari-server/resources</directory>
+              <sources>
+                <source>
+                  <location>src/main/resources/Ambari-DDL.sql</location>
+                </source>
+              </sources>
+            </mapping>
+
+            <mapping>
+              <directory>/var/run/ambari-server</directory>
+            </mapping>
+
+          </mappings>
+        </configuration>
+      </plugin>
+
+      <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>findbugs-maven-plugin</artifactId>
+          <version>2.5.2</version>
+          <configuration>
+            <failOnError>false</failOnError>
+          </configuration>
+          <executions>
+            <execution>
+              <phase>verify</phase>
+              <goals>
+                <goal>check</goal>
+              </goals>
+            </execution>
+          </executions>
+      </plugin>
+
     </plugins>
   </build>
   <profiles>
   </profiles>
+
+
   <dependencies>
     <dependency>
       <groupId>commons-io</groupId>
@@ -77,10 +210,6 @@
       <groupId>com.google.inject.extensions</groupId>
       <artifactId>guice-servlet</artifactId>
     </dependency>
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.derby</groupId>
       <artifactId>derby</artifactId>
@@ -260,5 +389,25 @@
       <artifactId>guice</artifactId>
       <version>3.0</version>
     </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
+    </dependency>
+    <dependency>
+      <groupId>postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>8.3-603.jdbc4</version>
+    </dependency>
   </dependencies>
+
+    <!--<reporting>
+        <plugins>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>findbugs-maven-plugin</artifactId>
+                <version>2.5.2</version>
+            </plugin>
+        </plugins>
+    </reporting>-->
 </project>

+ 5 - 5
ambari-server/src/main/assemblies/server.xml

@@ -75,11 +75,11 @@
       <outputDirectory>/ambari-server-${project.version}/etc/ambari-server/conf</outputDirectory>
     </fileSet>
     <fileSet>
-      <directory>src/main/assemblies</directory>
-      <outputDirectory>/ambari-server-${project.version}/res</outputDirectory>
-      <excludes>
-        <exclude>*</exclude>
-      </excludes>
+      <directory>src/main/resources</directory>
+      <outputDirectory>/ambari-server-${project.version}/var/lib/ambari-server/resources/</outputDirectory>
+      <includes>
+        <include>stacks/**</include>
+      </includes>
     </fileSet>
   </fileSets>
   <dependencySets>

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/HostNotFoundException.java

@@ -21,7 +21,7 @@ package org.apache.ambari.server;
 @SuppressWarnings("serial")
 public class HostNotFoundException extends AmbariException {
 
-  public HostNotFoundException(String hostName) {
-    super("Host not found, hostName=" + hostName);
+  public HostNotFoundException(String hostname) {
+    super("Host not found, hostname=" + hostname);
   }
 }

+ 31 - 0
ambari-server/src/main/java/org/apache/ambari/server/StackNotFoundException.java

@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server;
+
+@SuppressWarnings("serial")
+public class StackNotFoundException extends AmbariException {
+
+  public StackNotFoundException (String stackName,
+      String stackVersion) {
+    super("Stack Information not found"
+        + ", stackName=" + stackName
+        + ", stackVersion=" + stackVersion);
+  }
+
+}

+ 11 - 0
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessor.java

@@ -17,7 +17,9 @@
  */
 package org.apache.ambari.server.actionmanager;
 
+import java.util.Collection;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.agent.CommandReport;
@@ -62,4 +64,13 @@ public interface ActionDBAccessor {
    */
   public void hostRoleScheduled(Stage s, String hostname, String roleStr);
 
+  public List<HostRoleCommand> getRequestTasks(long requestId);
+
+  public Collection<HostRoleCommand> getTasks(Collection<Long> taskIds);
+
+  public List<Stage> getStagesByHostRoleStatus(Set<HostRoleStatus> statuses);
+
+  public List<Long> getRequests();
+  
+  public HostRoleCommand getTask(long taskId);
 }

+ 65 - 8
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java

@@ -21,22 +21,33 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
+import java.util.Set;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.agent.CommandReport;
-
-import com.google.inject.Singleton;
-import org.apache.ambari.server.orm.dao.*;
-import org.apache.ambari.server.orm.entities.*;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.dao.ExecutionCommandDAO;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
+import org.apache.ambari.server.orm.dao.RoleSuccessCriteriaDAO;
+import org.apache.ambari.server.orm.dao.StageDAO;
+import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.ambari.server.orm.entities.ExecutionCommandEntity;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
+import org.apache.ambari.server.orm.entities.RoleSuccessCriteriaEntity;
+import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
+
 @Singleton
 public class ActionDBAccessorImpl implements ActionDBAccessor {
   private static final Logger LOG = LoggerFactory.getLogger(ActionDBAccessorImpl.class);
@@ -56,6 +67,8 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
   @Inject
   private StageFactory stageFactory;
   @Inject
+  private HostRoleCommandFactory hostRoleCommandFactory;
+  @Inject
   private Clusters clusters;
 
   private final long requestId;
@@ -192,7 +205,11 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
   @Transactional
   public void updateHostRoleState(String hostname, long requestId,
       long stageId, String role, CommandReport report) {
-    List<HostRoleCommandEntity> commands = hostRoleCommandDAO.findByHostRole(hostname, requestId, stageId, Role.valueOf(role));
+    LOG.info("Update HostRoleState: "
+        + "HostName " + hostname + " requestId " + requestId + " stageId "
+        + stageId + " role " + role + " report " + report);
+    List<HostRoleCommandEntity> commands = hostRoleCommandDAO.findByHostRole(
+        hostname, requestId, stageId, Role.valueOf(role));
     for (HostRoleCommandEntity command : commands) {
       command.setStatus(HostRoleStatus.valueOf(report.getStatus()));
       command.setStdOut(report.getStdOut());
@@ -233,4 +250,44 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
     }
 
   }
+
+  @Override
+  public List<HostRoleCommand> getRequestTasks(long requestId) {
+    List<HostRoleCommand> tasks = new ArrayList<HostRoleCommand>();
+    for (HostRoleCommandEntity hostRoleCommandEntity : hostRoleCommandDAO.findByRequest(requestId)) {
+      tasks.add(hostRoleCommandFactory.createExisting(hostRoleCommandEntity));
+    }
+    return tasks;
+  }
+
+  @Override
+  public Collection<HostRoleCommand> getTasks(Collection<Long> taskIds) {
+    List<HostRoleCommand> commands = new ArrayList<HostRoleCommand>();
+    for (HostRoleCommandEntity commandEntity : hostRoleCommandDAO.findByPKs(taskIds)) {
+      commands.add(hostRoleCommandFactory.createExisting(commandEntity));
+    }
+    return commands;
+  }
+
+  @Override
+  public List<Stage> getStagesByHostRoleStatus(Set<HostRoleStatus> statuses) {
+    List<Stage> stages = new ArrayList<Stage>();
+    for (StageEntity stageEntity : stageDAO.findByCommandStatuses(statuses)) {
+      stages.add(stageFactory.createExisting(stageEntity));
+    }
+    return stages;
+  }
+
+  @Override
+  public List<Long> getRequests() {
+    return hostRoleCommandDAO.getRequests();
+  }
+    
+  public HostRoleCommand getTask(long taskId) {
+    HostRoleCommandEntity commandEntity = hostRoleCommandDAO.findByPK((int)taskId);
+    if (commandEntity == null) {
+      return null;
+    }
+    return hostRoleCommandFactory.createExisting(commandEntity);
+  }
 }

+ 49 - 2
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBInMemoryImpl.java

@@ -18,7 +18,10 @@
 package org.apache.ambari.server.actionmanager;
 
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.agent.CommandReport;
@@ -117,7 +120,7 @@ public class ActionDBInMemoryImpl implements ActionDBAccessor {
       }
     }
   }
-  
+
   @Override
   public void abortHostRole(String host, long requestId, long stageId, Role role) {
     CommandReport report = new CommandReport();
@@ -132,9 +135,53 @@ public class ActionDBInMemoryImpl implements ActionDBAccessor {
   public synchronized long getLastPersistedRequestIdWhenInitialized() {
     return lastRequestId;
   }
-  
+
   @Override
   public void hostRoleScheduled(Stage s, String hostname, String roleStr) {
     //Nothing needed for in-memory implementation
   }
+
+  @Override
+  public List<HostRoleCommand> getRequestTasks(long requestId) {
+    return null;
+  }
+
+  @Override
+  public Collection<HostRoleCommand> getTasks(Collection<Long> taskIds) {
+    return null;
+  }
+  
+  @Override
+  public List<Stage> getStagesByHostRoleStatus(Set<HostRoleStatus> statuses) {
+    List<Stage> l = new ArrayList<Stage>();
+    for (Stage s: stageList) {
+      if (s.doesStageHaveHostRoleStatus(statuses)) {
+        l.add(s);
+      }
+    }
+    return l;
+  }
+  @Override
+  public synchronized List<Long> getRequests() {
+    Set<Long> requestIds = new HashSet<Long>();
+    for (Stage s: stageList) {
+      requestIds.add(s.getRequestId());
+    }
+    List<Long> ids = new ArrayList<Long>();
+    ids.addAll(requestIds);
+    return ids;
+  }
+
+  public HostRoleCommand getTask(long taskId) {
+    for (Stage s : stageList) {
+      for (String host : s.getHosts()) {
+        for (ExecutionCommand cmd : s.getExecutionCommands(host)) {
+          if (cmd.getTaskId() == taskId) {
+            return s.getHostRoleCommand(host, cmd.getRole().toString());
+          }
+        }
+      }
+    }
+    return null;
+  }
 }

+ 51 - 0
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionManager.java

@@ -17,7 +17,9 @@
  */
 package org.apache.ambari.server.actionmanager;
 
+import java.util.Collection;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.ambari.server.agent.ActionQueue;
@@ -84,12 +86,34 @@ public class ActionManager {
     }
     //persist the action response into the db.
     for (CommandReport report : reports) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Processing command report : " + report.toString());
+      }
       String actionId = report.getActionId();
       long [] requestStageIds = StageUtils.getRequestStage(actionId);
       long requestId = requestStageIds[0];
       long stageId = requestStageIds[1];
+      HostRoleCommand command = db.getTask(report.getTaskId());
+      if (command == null) {
+        LOG.warn("The task " + report.getTaskId()
+            + " is invalid");
+        continue;
+      }
+      if (!command.getStatus().equals(HostRoleStatus.IN_PROGRESS)
+          && !command.getStatus().equals(HostRoleStatus.QUEUED)) {
+        LOG.warn("The task " + command.getTaskId()
+            + " is not in progress, ignoring update");
+        continue;
+      }
       db.updateHostRoleState(hostname, requestId, stageId, report.getRole(),
           report);
+      List<HostRoleCommand> commands = db.getRequestTasks(requestId);
+      LOG.debug("List of commands " + (commands == null ? 0: commands.size()));
+      if (commands != null) {
+        for (HostRoleCommand cmd : commands) {
+          LOG.info("******COMMAND DUMP*****" + cmd);
+        }
+      }
     }
   }
 
@@ -104,4 +128,31 @@ public class ActionManager {
     return requestCounter.incrementAndGet();
   }
 
+  public List<HostRoleCommand> getRequestTasks(long requestId) {
+    List<HostRoleCommand> commands = db.getRequestTasks(requestId);
+    LOG.debug("GETTING List of commands for request Id " + requestId + " : " +
+        (commands == null ? 0: commands.size()));
+    if (commands != null) {
+      for (HostRoleCommand command : commands) {
+        LOG.info("******GETTING COMMAND DUMP*****" + command);
+      }
+    }
+    return commands;
+  }
+
+  public Collection<HostRoleCommand> getTasks(Collection<Long> taskIds) {
+    return db.getTasks(taskIds);
+  }
+
+  public List<Stage> getRequestsByHostRoleStatus(Set<HostRoleStatus> statuses) {
+    return db.getStagesByHostRoleStatus(statuses);
+  }
+
+  /**
+   * Returns last 20 requests
+   * @return
+   */
+  public List<Long> getRequests() {
+    return db.getRequests();
+  }
 }

+ 16 - 4
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java

@@ -23,6 +23,7 @@ import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ServiceComponentNotFoundException;
 import org.apache.ambari.server.agent.ActionQueue;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.state.Cluster;
@@ -89,10 +90,14 @@ class ActionScheduler implements Runnable {
 
   private void doWork() throws AmbariException {
     List<Stage> stages = db.getStagesInProgress();
-    LOG.info("Scheduler wakes up");
+    if (LOG.isDebugEnabled()) {
+      LOG.info("Scheduler wakes up");
+    }
     if (stages == null || stages.isEmpty()) {
       //Nothing to do
-      LOG.info("No stage in progress..nothing to do");
+      if (LOG.isDebugEnabled()) {
+        LOG.info("No stage in progress..nothing to do");
+      }
       return;
     }
 
@@ -186,6 +191,8 @@ class ActionScheduler implements Runnable {
               ServiceComponentHost svcCompHost =
                   svcComp.getServiceComponentHost(host);
               svcCompHost.handleEvent(timeoutEvent);
+            } catch (ServiceComponentNotFoundException scnex) {
+              LOG.info("Not a service component, assuming its an action", scnex);
             } catch (InvalidStateTransitionException e) {
               LOG.info("Transition failed for host: "+host+", role: "+roleStr, e);
             } catch (AmbariException ex) {
@@ -235,8 +242,8 @@ class ActionScheduler implements Runnable {
         ServiceComponentHost svcCompHost =
             svcComp.getServiceComponentHost(hostname);
         svcCompHost.handleEvent(s.getFsmEvent(hostname, roleStr));
-        s.setStartTime(hostname,roleStr, now);
-        s.setHostRoleStatus(hostname, roleStr, HostRoleStatus.QUEUED);
+      } catch (ServiceComponentNotFoundException scnex) {
+        LOG.info("Not a service component, assuming its an action", scnex);
       } catch (InvalidStateTransitionException e) {
         LOG.info(
             "Transition failed for host: " + hostname + ", role: "
@@ -247,6 +254,8 @@ class ActionScheduler implements Runnable {
             e);
         throw e;
       }
+      s.setStartTime(hostname,roleStr, now);
+      s.setHostRoleStatus(hostname, roleStr, HostRoleStatus.QUEUED);
     }
     s.setLastAttemptTime(hostname, roleStr, now);
     s.incrementAttemptCount(hostname, roleStr);
@@ -279,6 +288,9 @@ class ActionScheduler implements Runnable {
       break;
     case ABORTED:
       rs.numAborted++;
+      break;
+    default:
+      LOG.error("Unknown status " + status.name());
     }
   }
 

+ 0 - 1
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostAction.java

@@ -20,7 +20,6 @@ package org.apache.ambari.server.actionmanager;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.ambari.server.agent.AgentCommand;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.utils.StageUtils;
 

+ 14 - 3
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java

@@ -42,7 +42,8 @@ import java.io.IOException;
 public class HostRoleCommand {
   private static final Logger log = LoggerFactory.getLogger(HostRoleCommand.class);
 
-  private int taskId = -1;
+  private long taskId = -1;
+  private long stageId = -1;
   private String hostName;
   private final Role role;
   private HostRoleStatus status = HostRoleStatus.PENDING;
@@ -66,6 +67,7 @@ public class HostRoleCommand {
   @AssistedInject
   public HostRoleCommand(@Assisted HostRoleCommandEntity hostRoleCommandEntity, Injector injector) {
     taskId = hostRoleCommandEntity.getTaskId();
+    stageId = hostRoleCommandEntity.getStage().getStageId();
     this.hostName = hostRoleCommandEntity.getHostName();
     role = hostRoleCommandEntity.getRole();
     status = hostRoleCommandEntity.getStatus();
@@ -126,11 +128,11 @@ public class HostRoleCommand {
   }
 
 
-  public int getTaskId() {
+  public long getTaskId() {
     return taskId;
   }
 
-  public void setTaskId(int taskId) {
+  public void setTaskId(long taskId) {
       this.taskId = taskId;
       executionCommand.setTaskId(taskId);
   }
@@ -211,6 +213,14 @@ public class HostRoleCommand {
     this.executionCommand = executionCommand;
   }
 
+  public long getStageId() {
+    return stageId;
+  }
+
+  public void setStageId(long stageId) {
+    this.stageId = stageId;
+  }
+
   @Override
   public int hashCode() {
     return role.hashCode();
@@ -229,6 +239,7 @@ public class HostRoleCommand {
   public String toString() {
     StringBuilder builder = new StringBuilder();
     builder.append("HostRoleCommand State:\n");
+    builder.append("  TaskId: " + taskId + "\n");
     builder.append("  Role: " + role + "\n");
     builder.append("  Status: " + status + "\n");
     builder.append("  Event: " + event + "\n");

+ 40 - 21
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java

@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
 
 import com.google.inject.Injector;
@@ -54,10 +55,11 @@ public class Stage {
   //Map of host to host-roles
   Map<String, Map<String, HostRoleCommand>> hostRoleCommands =
       new TreeMap<String, Map<String, HostRoleCommand>>();
-  private Map<String, List<ExecutionCommand>> commandsToSend = 
+  private Map<String, List<ExecutionCommand>> commandsToSend =
       new TreeMap<String, List<ExecutionCommand>>();
 
-  public Stage(long requestId, String logDir, String clusterName) {
+  @AssistedInject
+  public Stage(@Assisted long requestId, @Assisted("logDir") String logDir, @Assisted("clusterName") String clusterName) {
     this.requestId = requestId;
     this.logDir = logDir;
     this.clusterName = clusterName;
@@ -106,7 +108,7 @@ public class Stage {
   public StageEntity constructNewPersistenceEntity() {
     StageEntity stageEntity = new StageEntity();
     stageEntity.setRequestId(requestId);
-    stageEntity.setStageId(stageId);
+    stageEntity.setStageId(getStageId());
     stageEntity.setLogInfo(logDir);
     stageEntity.setHostRoleCommands(new ArrayList<HostRoleCommandEntity>());
     stageEntity.setRoleSuccessCriterias(new ArrayList<RoleSuccessCriteriaEntity>());
@@ -153,7 +155,7 @@ public class Stage {
   }
 
   public String getActionId() {
-    return StageUtils.getActionId(requestId, stageId);
+    return StageUtils.getActionId(requestId, getStageId());
   }
 
   /**
@@ -162,7 +164,7 @@ public class Stage {
    * adds them to the Stage. This should be called only once for a host-role
    * for a given stage.
    */
-  public synchronized void addHostRoleExecutionCommand(String host, Role role,  RoleCommand command, 
+  public synchronized void addHostRoleExecutionCommand(String host, Role role,  RoleCommand command,
       ServiceComponentHostEvent event, String clusterName, String serviceName) {
     Log.info("Adding host role command for role: "+role+", command: "+command
         +", event: "+event+", clusterName: "+clusterName+", serviceName: "+serviceName);
@@ -198,12 +200,12 @@ public class Stage {
     }
     execCmdList.add(cmd);
   }
-  
+
   /**
-   * 
+   *
    * @return list of hosts
    */
-  public synchronized List<String> getHosts() {
+  public synchronized List<String> getHosts() { // TODO: Check whether method should be synchronized
     List<String> hlist = new ArrayList<String>();
     for (String h : this.hostRoleCommands.keySet()) {
       hlist.add(h);
@@ -219,11 +221,11 @@ public class Stage {
       return f;
     }
   }
-  
+
   public synchronized void setSuccessFactors(Map<Role, Float> suc) {
     successFactors = suc;
   }
-  
+
   public synchronized Map<Role, Float> getSuccessFactors() {
     return successFactors;
   }
@@ -260,7 +262,7 @@ public class Stage {
     }
     return null;
   }
-  
+
   public List<ExecutionCommand> getExecutionCommands(String hostname) {
     return this.commandsToSend.get(hostname);
   }
@@ -268,29 +270,29 @@ public class Stage {
   public long getStartTime(String hostname, String role) {
     return this.hostRoleCommands.get(hostname).get(role).getStartTime();
   }
-  
+
   public void setStartTime(String hostname, String role, long startTime) {
     this.hostRoleCommands.get(hostname).get(role).setStartTime(startTime);
   }
-  
+
   public HostRoleStatus getHostRoleStatus(String hostname, String role) {
     return this.hostRoleCommands.get(hostname).get(role).getStatus();
   }
-  
+
   public void setHostRoleStatus(String host, String role,
       HostRoleStatus status) {
     this.hostRoleCommands.get(host).get(role).setStatus(status);
   }
-  
+
   public ServiceComponentHostEvent getFsmEvent(String hostname, String roleStr) {
     return this.hostRoleCommands.get(hostname).get(roleStr).getEvent();
   }
-  
+
 
   public void setExitCode(String hostname, String role, int exitCode) {
     this.hostRoleCommands.get(hostname).get(role).setExitCode(exitCode);
   }
-  
+
   public int getExitCode(String hostname, String role) {
     return this.hostRoleCommands.get(hostname).get(role).getExitCode();
   }
@@ -302,7 +304,7 @@ public class Stage {
   public void setStdout(String hostname, String role, String stdOut) {
     this.hostRoleCommands.get(hostname).get(role).setStdout(stdOut);
   }
-  
+
   public synchronized boolean isStageInProgress() {
     for(String host: hostRoleCommands.keySet()) {
       for (String role : hostRoleCommands.get(host).keySet()) {
@@ -311,7 +313,7 @@ public class Stage {
           return false;
         }
         if (hrc.getStatus().equals(HostRoleStatus.PENDING) ||
-            hrc.getStatus().equals(HostRoleStatus.QUEUED) || 
+            hrc.getStatus().equals(HostRoleStatus.QUEUED) ||
             hrc.getStatus().equals(HostRoleStatus.IN_PROGRESS)) {
           return true;
         }
@@ -320,6 +322,23 @@ public class Stage {
     return false;
   }
 
+  public synchronized boolean doesStageHaveHostRoleStatus(
+      Set<HostRoleStatus> statuses) {
+    for(String host: hostRoleCommands.keySet()) {
+      for (String role : hostRoleCommands.get(host).keySet()) {
+        HostRoleCommand hrc = hostRoleCommands.get(host).get(role);
+        if (hrc == null) {
+          return false;
+        }
+        for (HostRoleStatus status : statuses)
+        if (hrc.getStatus().equals(status)) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
   public Map<String, List<ExecutionCommand>> getExecutionCommands() {
     return this.commandsToSend;
   }
@@ -331,7 +350,7 @@ public class Stage {
   /**
    * This method should be used only in stage planner. To add
    * a new execution command use
-   * {@link #addHostRoleExecutionCommand(String, Role, RoleCommand, 
+   * {@link #addHostRoleExecutionCommand(String, Role, RoleCommand,
    * ServiceComponentHostEvent, String, String)}
    */
   public synchronized void addExecutionCommand(Stage origStage,
@@ -353,7 +372,7 @@ public class Stage {
   HostRoleCommand getHostRoleCommand(String hostname, String role) {
     return hostRoleCommands.get(hostname).get(role);
   }
-  
+
   @Override //Object
   public String toString() {
     StringBuilder builder = new StringBuilder();

+ 3 - 0
ambari-server/src/main/java/org/apache/ambari/server/actionmanager/StageFactory.java

@@ -18,10 +18,13 @@
 
 package org.apache.ambari.server.actionmanager;
 
+import com.google.inject.assistedinject.Assisted;
 import org.apache.ambari.server.orm.entities.StageEntity;
 
 public interface StageFactory {
 
+  Stage createNew(long requestId, @Assisted("logDir") String logDir, @Assisted("clusterName") String clusterName);
+
   Stage createExisting(String actionId);
 
   Stage createExisting(StageEntity stageEntity);

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/agent/CommandReport.java

@@ -124,6 +124,6 @@ public class CommandReport {
   
   @Override
   public String toString() {
-    return actionId + "-" + role;
+    return taskId + " " + role + " " + status + " "  + exitCode;
   }
 }

+ 4 - 3
ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java

@@ -41,7 +41,7 @@ public class ExecutionCommand extends AgentCommand {
     super(AgentCommandType.EXECUTION_COMMAND);
   }
   private String clusterName;
-  private int taskId;
+  private long taskId;
   private String commandId;
   private String hostname;
   private Role role;
@@ -91,12 +91,12 @@ public class ExecutionCommand extends AgentCommand {
   }
 
   @JsonProperty("taskId")
-  public int getTaskId() {
+  public long getTaskId() {
     return taskId;
   }
 
   @JsonProperty("taskId")
-  public void setTaskId(int taskId) {
+  public void setTaskId(long taskId) {
     this.taskId = taskId;
   }
 
@@ -189,4 +189,5 @@ public class ExecutionCommand extends AgentCommand {
   public void setServiceName(String serviceName) {
     this.serviceName = serviceName;
   }
+
 }

+ 7 - 9
ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java

@@ -23,6 +23,7 @@ import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.HostNotFoundException;
+import org.apache.ambari.server.ServiceComponentNotFoundException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
 import org.apache.ambari.server.state.AgentVersion;
@@ -76,16 +77,13 @@ public class HeartBeatHandler {
   public HeartBeatResponse handleHeartBeat(HeartBeat heartbeat)
       throws AmbariException {
     HeartBeatResponse response = new HeartBeatResponse();
- response.setResponseId(0L);
+    response.setResponseId(0L);
     String hostname = heartbeat.getHostname();
     LOG.info("Action queue reference = "+actionQueue);
     LOG.info("Heartbeat received from host " + heartbeat.getHostname()
         + " responseId=" + heartbeat.getResponseId());
     Host hostObject = clusterFsm.getHost(hostname);
-    // FIXME need to remove this hack
-    hostObject.refresh();
     long now = System.currentTimeMillis();
-    hostObject.refresh();
 
     try {
       if (heartbeat.getNodeStatus().getStatus()
@@ -114,12 +112,10 @@ public class HeartBeatHandler {
       if (service == null || "".equals(service)) {
         throw new AmbariException("Invalid command report, service: "+service);
       }
-      Service svc = cl.getService(service);
-      ServiceComponent svcComp = svc.getServiceComponent(
-          report.getRole());
-      ServiceComponentHost scHost = svcComp.getServiceComponentHost(
-          hostname);
       try {
+        Service svc = cl.getService(service);
+        ServiceComponent svcComp = svc.getServiceComponent(report.getRole());
+        ServiceComponentHost scHost = svcComp.getServiceComponentHost(hostname);
         if (report.getStatus().equals("COMPLETED")) {
           scHost.handleEvent(new ServiceComponentHostOpSucceededEvent(scHost
               .getServiceComponentName(), hostname, now));
@@ -128,6 +124,8 @@ public class HeartBeatHandler {
           scHost.handleEvent(new ServiceComponentHostOpFailedEvent(scHost
               .getServiceComponentName(), hostname, now));
         }
+      } catch (ServiceComponentNotFoundException scnex) {
+        LOG.info("Not a service component, assuming its an action", scnex);
       } catch (InvalidStateTransitionException ex) {
         LOG.warn("State machine exception", ex);
       }

+ 54 - 42
ambari-server/src/main/java/org/apache/ambari/server/agent/HostInfo.java

@@ -65,7 +65,7 @@ public class HostInfo {
   public String getArchitecture() {
     return this.architecture;
   }
-  
+
   @JsonProperty("architecture")
   public void setArchitecture(String architecture) {
     this.architecture = architecture;
@@ -75,7 +75,7 @@ public class HostInfo {
   public String getDomain() {
     return this.domain;
   }
-  
+
   @JsonProperty("domain")
   public void setDomain(String domain) {
     this.domain = domain;
@@ -85,7 +85,7 @@ public class HostInfo {
   public String getFQDN() {
     return this.fqdn;
   }
-  
+
   @JsonProperty("fqdn")
   public void setFQDN(String fqdn) {
     this.fqdn = fqdn;
@@ -100,7 +100,7 @@ public class HostInfo {
   public void setHardwareIsa(String hardwareisa) {
     this.hardwareisa = hardwareisa;
   }
-  
+
   @JsonProperty("hardwaremodel")
   public String getHardwareModel() {
     return this.hardwaremodel;
@@ -115,7 +115,7 @@ public class HostInfo {
   public String getHostName() {
     return this.hostname;
   }
-  
+
   @JsonProperty("hostname")
   public void setHostName(String hostname) {
     this.hostname = hostname;
@@ -135,7 +135,7 @@ public class HostInfo {
   public String getInterfaces() {
     return this.interfaces;
   }
-  
+
   @JsonProperty("interfaces")
   public void setInterfaces(String interfaces) {
     this.interfaces = interfaces;
@@ -145,7 +145,7 @@ public class HostInfo {
   public String getIPAddress() {
     return this.ipaddress;
   }
-  
+
   @JsonProperty("ipaddress")
   public void setIPAddress(String ipaddress) {
     this.ipaddress = ipaddress;
@@ -160,27 +160,27 @@ public class HostInfo {
   public void setKernel(String kernel) {
     this.kernel = kernel;
   }
-  
+
   @JsonProperty("kernelmajversion")
   public String getKernelMajVersion() {
     return this.kernelmajversion;
   }
-  
+
   @JsonProperty("kernelmajversion")
   public void setKernelMajVersion(String kernelmajversion) {
     this.kernelmajversion = kernelmajversion;
   }
-  
+
   @JsonProperty("kernelrelease")
   public String getKernelRelease() {
     return this.kernelrelease;
   }
-  
+
   @JsonProperty("kernelrelease")
   public void setKernelRelease(String kernelrelease) {
     this.kernelrelease = kernelrelease;
   }
-  
+
   @JsonProperty("kernelversion")
   public String getKernelVersion() {
     return this.kernelversion;
@@ -205,32 +205,32 @@ public class HostInfo {
   public long getFreeMemory() {
     return this.memoryfree;
   }
-  
+
   @JsonProperty("memoryfree")
   public void setFreeMemory(long memoryfree) {
     this.memoryfree = memoryfree;
   }
-  
+
   @JsonProperty("memorysize")
   public long getMemorySize() {
     return this.memorysize;
   }
-  
+
   @JsonProperty("memorysize")
   public void setMemorySize(long memorysize) {
     this.memorysize = memorysize;
   }
-  
+
   @JsonProperty("mounts")
   public List<DiskInfo> getMounts() {
     return this.mounts;
   }
-  
+
   @JsonProperty("mounts")
   public void setMounts(List<DiskInfo> mounts) {
     this.mounts = mounts;
   }
-  
+
   @JsonProperty("memorytotal")
   public long getMemoryTotal() {
     return this.memorytotal;
@@ -240,12 +240,12 @@ public class HostInfo {
   public void setMemoryTotal(long memorytotal) {
     this.memorytotal = memorytotal;
   }
-  
+
   @JsonProperty("netmask")
   public String getNetMask() {
     return this.netmask;
   }
-  
+
   @JsonProperty("netmask")
   public void setNetMask(String netmask) {
     this.netmask = netmask;
@@ -255,7 +255,7 @@ public class HostInfo {
   public String getOS() {
     return this.operatingsystem;
   }
-  
+
   @JsonProperty("operatingsystem")
   public void setOS(String operatingsystem) {
     this.operatingsystem = operatingsystem;
@@ -265,27 +265,27 @@ public class HostInfo {
   public String getOSRelease() {
     return this.operatingsystemrelease;
   }
-  
+
   @JsonProperty("operatingsystemrelease")
   public void setOSRelease(String operatingsystemrelease) {
     this.operatingsystemrelease = operatingsystemrelease;
   }
-  
+
   @JsonProperty("osfamily")
   public String getOSFamily() {
     return this.osfamily;
   }
-  
+
   @JsonProperty("osfamily")
   public void setOSFamily(String osfamily) {
     this.osfamily = osfamily;
   }
-  
+
   @JsonProperty("physicalprocessorcount")
   public int getPhysicalProcessorCount() {
     return this.physicalprocessorcount;
   }
-  
+
   @JsonProperty("physicalprocessorcount")
   public void setPhysicalProcessorCount(int physicalprocessorcount) {
     this.physicalprocessorcount = physicalprocessorcount;
@@ -295,7 +295,7 @@ public class HostInfo {
   public int getProcessorCount() {
     return this.processorcount;
   }
-  
+
   @JsonProperty("processorcount")
   public void setProcessorCount(int processorcount) {
     this.processorcount = processorcount;
@@ -305,37 +305,37 @@ public class HostInfo {
   public boolean getSeLinux() {
     return selinux;
   }
-  
+
   @JsonProperty("selinux")
   public void setSeLinux(boolean selinux) {
     this.selinux = selinux;
   }
-  
+
   @JsonProperty("swapfree")
   public String getSwapFree() {
     return this.swapfree;
   }
-  
+
   @JsonProperty("swapfree")
   public void setSwapFree(String swapfree) {
     this.swapfree = swapfree;
   }
-  
+
   @JsonProperty("swapsize")
   public String getSwapSize() {
     return swapsize;
   }
-  
+
   @JsonProperty("swapsize")
   public void setSwapSize(String swapsize) {
     this.swapsize = swapsize;
   }
- 
+
   @JsonProperty("timezone")
   public String getTimeZone() {
     return this.timezone;
   }
-  
+
   @JsonProperty("timezone")
   public void setTimeZone(String timezone) {
     this.timezone = timezone;
@@ -345,7 +345,7 @@ public class HostInfo {
   public String getUptime() {
     return this.uptime;
   }
-  
+
   @JsonProperty("uptime")
   public void setUpTime(String uptime) {
     this.uptime = uptime;
@@ -355,7 +355,7 @@ public class HostInfo {
   public long getUptimeHours() {
     return this.uptime_hours;
   }
-  
+
   @JsonProperty("uptime_hours")
   public void setUpTimeHours(long uptime_hours) {
     this.uptime_hours = uptime_hours;
@@ -365,24 +365,36 @@ public class HostInfo {
   public long getUpTimeDays() {
     return this.uptime_days;
   }
-  
+
   @JsonProperty("uptime_days")
   public void setUpTimeDays(long uptime_days) {
     this.uptime_days = uptime_days;
   }
 
   private String getDiskString() {
-    String ret = "";
+    if (mounts == null) {
+      return null;
+    }
+    StringBuilder ret = new StringBuilder();
     for (DiskInfo diskInfo : mounts) {
-      ret = ret + "(" + diskInfo.toString() + ")";
+      ret.append("(").append(diskInfo.toString()).append(")");
     }
-    return ret;
+    return ret.toString();
   }
 
   public String toString() {
-    return "[memory=" + this.memorytotal + "," +
+    return "[" +
+        "hostname=" + this.hostname + "," +
+        "fqdn=" + this.fqdn + "," +
+        "domain=" + this.domain + "," +
+        "architecture=" + this.architecture + "," +
+        "processorcount=" + this.processorcount + "," +
+        "physicalprocessorcount=" + this.physicalprocessorcount + "," +
+        "osname=" + this.operatingsystem + "," +
+        "osversion=" + this.operatingsystemrelease + "," +
+        "osfamily=" + this.osfamily + "," +
+        "memory=" + this.memorytotal + "," +
         "uptime_hours=" + this.uptime_hours + "," +
-        "operatingsystem=" + this.operatingsystem + "," +
         "mounts=" + getDiskString() + "]\n";
   }
 }

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/agent/rest/AgentResource.java

@@ -100,7 +100,7 @@ public class AgentResource {
   public HeartBeatResponse heartbeat(HeartBeat message)
       throws WebApplicationException {
     LOG.info("Received Heartbeat message " + message);
-    HeartBeatResponse heartBeatResponse = new HeartBeatResponse();
+    HeartBeatResponse heartBeatResponse;
     try {
       heartBeatResponse = hh.handleHeartBeat(message);
     } catch (Exception e) {

+ 59 - 3
ambari-server/src/main/java/org/apache/ambari/server/api/handlers/BaseManagementHandler.java

@@ -22,6 +22,12 @@ import org.apache.ambari.server.api.resources.ResourceDefinition;
 import org.apache.ambari.server.api.services.Request;
 import org.apache.ambari.server.api.services.Result;
 import org.apache.ambari.server.api.services.ResultImpl;
+import org.apache.ambari.server.api.util.TreeNode;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+
+import java.util.Set;
 
 /**
  * Base handler for operations that persist state to the back-end.
@@ -31,9 +37,59 @@ public class BaseManagementHandler implements RequestHandler {
   public Result handleRequest(Request request) {
     ResourceDefinition resource = request.getResourceDefinition();
     resource.setProperties(request.getHttpBodyProperties());
-    request.getPersistenceManager().persist(resource);
+    RequestStatus status = request.getPersistenceManager().persist(resource);
+
+    return createResult(request, status);
+  }
+
+  private Result createResult(Request request, RequestStatus requestStatus) {
+    boolean isSynchronous = requestStatus.getStatus() == RequestStatus.Status.Complete;
+
+    Result result = new ResultImpl(isSynchronous);
+    TreeNode<Resource> tree = result.getResultTree();
+
+    Set<Resource> setResources = requestStatus.getAssociatedResources();
+    TreeNode<Resource> resourcesNode = null;
+    if (! setResources.isEmpty()) {
+      resourcesNode = tree.addChild(null, "resources");
+    }
+    int count = 1;
+    for (Resource resource : setResources) {
+      //todo: provide a more meaningful node name
+      resourcesNode.addChild(resource, resource.getType() + ":" + count++);
+    }
+
+    if (! isSynchronous) {
+      Resource requestResource = requestStatus.getRequestResource();
+      TreeNode<Resource> r = tree.addChild(requestResource, "request");
+      String requestHref = buildRequestHref(request, requestStatus);
+      r.setProperty("href", requestHref);
+    }
+
+    return result;
+  }
+
+  //todo: this needs to be rewritten and needs to support operating on clusters collection
+  private String buildRequestHref(Request request, RequestStatus requestStatus) {
+    StringBuilder sb = new StringBuilder();
+    String origHref = request.getURI();
+    String[] toks = origHref.split("/");
+
+    for (int i = 0; i < toks.length; ++i) {
+      String s = toks[i];
+      sb.append(s).append('/');
+      if ("clusters".equals(s)) {
+        sb.append(toks[i + 1]).append('/');
+        break;
+      }
+    }
+
+    //todo: shouldn't know property name
+    Object requestId = requestStatus.getRequestResource().getPropertyValue(
+        PropertyHelper.getPropertyId("id", "Requests"));
+
+    sb.append("requests/").append(requestId);
 
-    //todo: what to return from persist?  Possibly just the href of the updated resource.
-    return new ResultImpl();
+    return sb.toString();
   }
 }

+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/api/handlers/RequestHandlerFactory.java

@@ -35,9 +35,9 @@ public class RequestHandlerFactory {
     switch (requestType) {
       case GET:
         return new ReadHandler();
-      case PUT:
-        return new CreateHandler();
       case POST:
+        return new CreateHandler();
+      case PUT:
         return new UpdateHandler();
       case DELETE:
         return new DeleteHandler();

+ 23 - 8
ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java

@@ -21,6 +21,7 @@ package org.apache.ambari.server.api.query;
 import org.apache.ambari.server.api.resources.ResourceDefinition;
 import org.apache.ambari.server.api.services.ResultImpl;
 import org.apache.ambari.server.api.util.TreeNodeImpl;
+import org.apache.ambari.server.controller.internal.ClusterControllerImpl;
 import org.apache.ambari.server.controller.internal.PropertyIdImpl;
 import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
@@ -31,6 +32,9 @@ import org.apache.ambari.server.controller.predicate.EqualsPredicate;
 import org.apache.ambari.server.api.services.Result;
 import org.apache.ambari.server.controller.spi.*;
 import org.apache.ambari.server.api.util.TreeNode;
+import org.mortbay.log.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.*;
 
@@ -53,6 +57,9 @@ public class QueryImpl implements Query {
    */
   private Map<PropertyId, TemporalInfo> m_mapPropertyTemporalInfo = new HashMap<PropertyId, TemporalInfo>();
 
+  /**
+   * Map that associates categories with temporal data.
+   */
   private Map<String, TemporalInfo> m_mapCategoryTemporalInfo = new HashMap<String, TemporalInfo>();
 
   /**
@@ -75,7 +82,8 @@ public class QueryImpl implements Query {
    */
   private Predicate m_userPredicate;
 
-
+  private final static Logger LOG =
+      LoggerFactory.getLogger(QueryImpl.class);
   /**
    * Constructor.
    *
@@ -145,16 +153,23 @@ public class QueryImpl implements Query {
     int count = 1;
     for (Resource resource : iterResource) {
       // add a child node for the resource and provide a unique name.  The name is never used.
+      //todo: provide a more meaningful node name
       TreeNode<Resource> node = tree.addChild(resource, resource.getType() + ":" + count++);
-
-      for (Map.Entry<String, ResourceDefinition> entry : m_mapSubResources.entrySet()) {
+      LOG.info("Resource object resource " + resource);
+       for (Map.Entry<String, ResourceDefinition> entry : m_mapSubResources.entrySet()) {
         String subResCategory = entry.getKey();
         ResourceDefinition r = entry.getValue();
-
-        r.setParentId(m_resourceDefinition.getType(), (String) resource.getPropertyValue(
+        
+        r.setParentId(m_resourceDefinition.getType(), (String) (resource.getPropertyValue(
             getClusterController().getSchema(m_resourceDefinition.getType()).
-                getKeyPropertyId(m_resourceDefinition.getType())));
-
+                getKeyPropertyId(m_resourceDefinition.getType())).toString()));
+        
+        LOG.info("Setting various values for resource " + r.getId());
+        if (r.getResourceIds() != null) {
+          for (Map.Entry<Resource.Type, String> tentry: r.getResourceIds().entrySet()) {
+            LOG.info("Resource Id's " + tentry.getKey() + " value " + tentry.getValue());
+          }
+        }
         TreeNode<Resource> childResult = r.getQuery().execute().getResultTree();
         childResult.setName(subResCategory);
         childResult.setProperty("isCollection", "false");
@@ -344,6 +359,6 @@ public class QueryImpl implements Query {
   }
 
   Result createResult() {
-    return new ResultImpl();
+    return new ResultImpl(true);
   }
 }

+ 55 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/resources/ActionResourceDefinition.java

@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.api.resources;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.controller.spi.Resource;
+
+
+public class ActionResourceDefinition extends BaseResourceDefinition {
+
+  private String m_clusterName;
+  private String m_serviceName;
+
+  public ActionResourceDefinition(String id, String clusterName, String serviceName) {
+    super(Resource.Type.Action, id);
+    m_clusterName = clusterName;
+    m_serviceName = serviceName;
+    setResourceId(Resource.Type.Cluster, m_clusterName);
+    setResourceId(Resource.Type.Service, m_serviceName);
+    getQuery().addProperty(getClusterController().getSchema(
+        Resource.Type.Action).getKeyPropertyId(Resource.Type.Action));
+  }
+  
+  @Override
+  public String getPluralName() {
+    return "actions";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "action";
+  }
+
+  @Override
+  public Map<String, ResourceDefinition> getSubResources() {
+    return new HashMap<String, ResourceDefinition>();
+  }
+}

+ 11 - 10
ambari-server/src/main/java/org/apache/ambari/server/api/resources/ClusterResourceDefinition.java

@@ -21,7 +21,6 @@ package org.apache.ambari.server.api.resources;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.server.controller.spi.PropertyId;
 import org.apache.ambari.server.controller.spi.Resource;
 
 /**
@@ -58,23 +57,25 @@ public class ClusterResourceDefinition extends BaseResourceDefinition {
     Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
 
     ServiceResourceDefinition serviceResource = new ServiceResourceDefinition(null, getId());
-    PropertyId serviceIdProperty = getClusterController().getSchema(
-        Resource.Type.Service).getKeyPropertyId(Resource.Type.Service);
-    serviceResource.getQuery().addProperty(serviceIdProperty);
+    serviceResource.getQuery().addProperty(getClusterController().getSchema(
+        Resource.Type.Service).getKeyPropertyId(Resource.Type.Service));
     mapChildren.put(serviceResource.getPluralName(), serviceResource);
 
     HostResourceDefinition hostResource = new HostResourceDefinition(null, getId());
-    PropertyId hostIdProperty = getClusterController().getSchema(
-        Resource.Type.Host).getKeyPropertyId(Resource.Type.Host);
-    hostResource.getQuery().addProperty(hostIdProperty);
+    hostResource.getQuery().addProperty(getClusterController().getSchema(
+        Resource.Type.Host).getKeyPropertyId(Resource.Type.Host));
     mapChildren.put(hostResource.getPluralName(), hostResource);
     
     ConfigurationResourceDefinition configResource = new ConfigurationResourceDefinition(null, null, getId());
-    PropertyId configIdProperty = getClusterController().getSchema(
-        Resource.Type.Configuration).getKeyPropertyId(Resource.Type.Configuration);
-    configResource.getQuery().addProperty(configIdProperty);
+    configResource.getQuery().addProperty(getClusterController().getSchema(
+        Resource.Type.Configuration).getKeyPropertyId(Resource.Type.Configuration));
     mapChildren.put(configResource.getPluralName(), configResource);
 
+    RequestResourceDefinition requestResource = new RequestResourceDefinition(null, getId());
+    requestResource.getQuery().addProperty(getClusterController().getSchema(
+        Resource.Type.Request).getKeyPropertyId(Resource.Type.Request));
+    mapChildren.put(requestResource.getPluralName(), requestResource);
+
     return mapChildren;
   }
 }

+ 15 - 13
ambari-server/src/main/java/org/apache/ambari/server/api/resources/ConfigurationResourceDefinition.java

@@ -22,16 +22,13 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.server.api.resources.ResourceDefinition.PostProcessor;
 import org.apache.ambari.server.api.services.Request;
 import org.apache.ambari.server.api.util.TreeNode;
 import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.controller.spi.Schema;
-import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 
 /**
- * Service resource definition.
+ * Configuration resource definition.
  */
 public class ConfigurationResourceDefinition extends BaseResourceDefinition {
 
@@ -40,28 +37,30 @@ public class ConfigurationResourceDefinition extends BaseResourceDefinition {
    */
   private String m_clusterId;
 
+
   /**
    * Constructor.
    *
-   * @param id        service id value
-   * @param clusterId cluster id value
+   * @param configType  configuration type
+   * @param configTag   configuration tag
+   * @param clusterId   cluster id value
    */
   public ConfigurationResourceDefinition(String configType, String configTag, String clusterId) {
     super(Resource.Type.Configuration, configType);
     m_clusterId = clusterId;
     setResourceId(Resource.Type.Cluster, m_clusterId);
-    
+
     if (null != configTag)
       setProperty(PropertyHelper.getPropertyId("tag", "Config"), configTag);
   }
-  
+
   @Override
   public List<PostProcessor> getPostProcessors() {
     List<PostProcessor> listProcessors = super.getPostProcessors();
     listProcessors.add(new HrefProcessor());
 
     return listProcessors;
-  }  
+  }
 
   @Override
   public String getPluralName() {
@@ -77,7 +76,7 @@ public class ConfigurationResourceDefinition extends BaseResourceDefinition {
   public Map<String, ResourceDefinition> getSubResources() {
     return new HashMap<String, ResourceDefinition>();
   }
-  
+
   private class HrefProcessor extends BaseHrefPostProcessor {
 
     @Override
@@ -87,15 +86,18 @@ public class ConfigurationResourceDefinition extends BaseResourceDefinition {
         String clusterId = getResourceIds().get(Resource.Type.Cluster);
         String type = (String) resultNode.getObject().getPropertyValue(PropertyHelper.getPropertyId("type"));
         String tag = (String) resultNode.getObject().getPropertyValue(PropertyHelper.getPropertyId("tag"));
-        
+
+        if (! href.endsWith("/")) {
+          href += '/';
+        }
         href = href.substring(0, href.indexOf(clusterId) + clusterId.length() + 1) +
             "configurations?type=" + type + "&tag=" + tag;
-        
+
         resultNode.setProperty("href", href);
       } else {
         super.process(request, resultNode, href);
       }
-      
+
     }
   }
 }

+ 25 - 8
ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostResourceDefinition.java

@@ -18,10 +18,13 @@
 
 package org.apache.ambari.server.api.resources;
 
-import org.apache.ambari.server.controller.spi.PropertyId;
+
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
 
-import java.util.*;
 
 /**
  * Host resource definition.
@@ -43,6 +46,17 @@ public class HostResourceDefinition extends BaseResourceDefinition {
     super(Resource.Type.Host, id);
     m_clusterId = clusterId;
     setResourceId(Resource.Type.Cluster, m_clusterId);
+    
+    if (null != clusterId) {
+      getQuery().addProperty(PropertyHelper.getPropertyId("cluster_name", "Hosts"));      
+    }
+    
+    if (null == id) {
+      getQuery().addProperty(getClusterController().getSchema(
+          Resource.Type.Host).getKeyPropertyId(Resource.Type.Host));
+    } else {
+      getQuery().addProperty(null, "*", null);
+    }
   }
 
   @Override
@@ -59,12 +73,15 @@ public class HostResourceDefinition extends BaseResourceDefinition {
   public Map<String, ResourceDefinition> getSubResources() {
     Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
 
-    HostComponentResourceDefinition hostComponentResource = new HostComponentResourceDefinition(
-        null, m_clusterId, getId());
-    PropertyId hostComponentIdProperty = getClusterController().getSchema(
-        Resource.Type.HostComponent).getKeyPropertyId(Resource.Type.HostComponent);
-    hostComponentResource.getQuery().addProperty(hostComponentIdProperty);
-    mapChildren.put(hostComponentResource.getPluralName(), hostComponentResource);
+    // !!! is this a host for a cluster
+    if (null != m_clusterId) {
+      HostComponentResourceDefinition hostComponentResource =
+          new HostComponentResourceDefinition(null, m_clusterId, getId());
+      hostComponentResource.getQuery().addProperty(getClusterController().getSchema(
+          Resource.Type.HostComponent).getKeyPropertyId(Resource.Type.HostComponent));
+      mapChildren.put(hostComponentResource.getPluralName(), hostComponentResource);
+    }
+
     return mapChildren;
   }
 }

+ 73 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/resources/RequestResourceDefinition.java

@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.resources;
+
+
+import org.apache.ambari.server.controller.spi.Resource;
+
+import java.util.HashMap;
+import java.util.Map;
+
+
+/**
+ * Request resource definition.
+ */
+public class RequestResourceDefinition extends BaseResourceDefinition {
+
+  /**
+   * value of cluster id foreign key
+   */
+  private String m_clusterId;
+
+
+  /**
+   * Constructor.
+   *
+   * @param id         request id value
+   * @param clusterId  cluster id value
+   */
+  public RequestResourceDefinition(String id, String clusterId) {
+    super(Resource.Type.Request, id);
+    m_clusterId = clusterId;
+    setResourceId(Resource.Type.Cluster, m_clusterId);
+  }
+
+  @Override
+  public String getPluralName() {
+    return "requests";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "request";
+  }
+
+  @Override
+  public Map<String, ResourceDefinition> getSubResources() {
+    Map<String, ResourceDefinition> mapChildren = new HashMap<String, ResourceDefinition>();
+
+    TaskResourceDefinition taskResourceDefinition =
+        new TaskResourceDefinition(null, m_clusterId, getId());
+    taskResourceDefinition.getQuery().addProperty(getClusterController().getSchema(
+        Resource.Type.Task).getKeyPropertyId(Resource.Type.Task));
+    mapChildren.put(taskResourceDefinition.getPluralName(), taskResourceDefinition);
+
+    return mapChildren;
+  }
+}

+ 71 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/resources/TaskResourceDefinition.java

@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.resources;
+
+import org.apache.ambari.server.controller.spi.Resource;
+
+import java.util.Collections;
+import java.util.Map;
+
+
+/**
+ * Task resource definition.
+ */
+public class TaskResourceDefinition extends BaseResourceDefinition {
+
+  /**
+   * Value of cluster id foreign key.
+   */
+  private String m_clusterId;
+
+  /**
+   * Value of request id foreign key.
+   */
+  private String m_requestId;
+
+
+  /**
+   * Constructor.
+   *
+   * @param id         task id value
+   * @param clusterId  cluster id value
+   * @param requestId  request id value
+   */
+  public TaskResourceDefinition(String id, String clusterId, String requestId) {
+    super(Resource.Type.Task, id);
+    m_clusterId = clusterId;
+    m_requestId = requestId;
+    setResourceId(Resource.Type.Cluster, m_clusterId);
+    setResourceId(Resource.Type.Request, m_requestId);
+  }
+
+  @Override
+  public String getPluralName() {
+    return "tasks";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "task";
+  }
+
+  @Override
+  public Map<String, ResourceDefinition> getSubResources() {
+    return Collections.emptyMap();
+  }
+}

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/rest/HealthCheck.java

@@ -30,7 +30,7 @@ import javax.ws.rs.core.MediaType;
 
 @Path("/check")
 public class HealthCheck {
-  private  final String status = "RUNNING";
+  private static final String status = "RUNNING";
   // This method is called if TEXT_PLAIN is request
 
   @GET

+ 124 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/ActionService.java

@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.api.services;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.ambari.server.api.resources.ActionResourceDefinition;
+import org.apache.ambari.server.api.resources.ResourceDefinition;
+
+public class ActionService extends BaseService {
+  /**
+   * Parent cluster name.
+   */
+  private String m_clusterName;
+  
+  private String m_serviceName;
+
+  /**
+   * Constructor.
+   *
+   * @param clusterName cluster name
+   * @param serviceName service name
+   */
+  public ActionService(String clusterName, String serviceName) {
+    m_clusterName = clusterName;
+    m_serviceName = serviceName;
+  }
+
+  /**
+   * Handles URL: /clusters/{clusterId}/services/{serviceName}/actions
+   * Get all actions for a service in a cluster.
+   *
+   * @param headers http headers
+   * @param ui      uri info
+   * @return action collection resource representation
+   */
+  @GET
+  @Produces("text/plain")
+  public Response getActions(@Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, null, ui, Request.Type.GET,
+        createResourceDefinition(null, m_clusterName, m_serviceName));
+  }
+
+  /**
+   * Handles URL: /clusters/{clusterId}/services/{serviceName}/actions.  
+   * The body should contain:
+   * <pre>
+   * {
+   *     "actionName":"name_string",
+   *     "parameters":
+   *     {
+   *         "key1":"value1",
+   *         // ...
+   *         "keyN":"valueN"
+   *     }
+   * }
+   * </pre>
+   * Create one or more actions for the service.
+   *
+   * @param headers http headers
+   * @param ui      uri info
+   * @return service collection resource representation
+   */
+  @POST
+  @Produces("text/plain")
+  public Response createActions(String body,@Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, body, ui, Request.Type.POST,
+        createResourceDefinition(null, m_clusterName, m_serviceName));
+  }
+  
+  /**
+   * Handles: POST /clusters/{clusterId}/services/{serviceId}/{actionName}
+   * Create a specific action for the service.
+   *
+   * @param body        http body
+   * @param headers     http headers
+   * @param ui          uri info
+   * @param actionName  action name
+   * @return information regarding the created action
+   */
+  @POST
+  @Path("{actionName}")
+  @Produces("text/plain")
+  public Response createService(String body, @Context HttpHeaders headers, @Context UriInfo ui,
+                                @PathParam("actionName") String actionName) {
+    return handleRequest(headers, body, ui, Request.Type.POST,
+        createResourceDefinition(actionName, m_clusterName, m_serviceName));
+  }
+
+  /**
+   * Create an action resource definition.
+   *
+   * @param actionName  action name
+   * @param clusterName cluster name
+   * @param serviceName service name
+   * @return an action resource definition
+   */
+  ResourceDefinition createResourceDefinition(String actionName,
+      String clusterName, String serviceName) {
+    return new ActionResourceDefinition(actionName, clusterName, serviceName);
+  }
+}

+ 426 - 89
ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java

@@ -18,38 +18,54 @@
 
 package org.apache.ambari.server.api.services;
 
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.state.PropertyInfo;
-import org.apache.ambari.server.state.RepositoryInfo;
-import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.StackInfo;
-
-import javax.ws.rs.Path;
 import java.io.File;
 import java.io.IOException;
-import java.util.*;
+import java.io.StringWriter;
+import java.net.URI;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
-import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
 
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.RepositoryInfo;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
-import org.w3c.dom.NodeList;
-import org.w3c.dom.Node;
 import org.w3c.dom.Element;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
 /**
  * ServiceInfo responsible getting information about cluster.
  */
-@Path("/metainfo/")
+@Singleton
 public class AmbariMetaInfo {
 
   private List<StackInfo> stacksResult = new ArrayList<StackInfo>();
-
-  private final static Logger log = LoggerFactory.getLogger(AmbariMetaInfo.class);
+  private File stackRoot;
+  private final static Logger LOG = LoggerFactory
+      .getLogger(AmbariMetaInfo.class);
 
   private static final String SERVICES_FOLDER_NAME = "services";
   private static final String SERVICE_METAINFO_FILE_NAME = "metainfo.xml";
@@ -58,23 +74,196 @@ public class AmbariMetaInfo {
 
   private static final String REPOSITORY_FILE_NAME = "repoinfo.xml";
   private static final String REPOSITORY_FOLDER_NAME = "repos";
-  private static final String REPOSITORY_XML_MAIN_BLOCK_NAME = "repo";
-  private static final String REPOSITORY_XML_PROPERTY_URL = "url";
-  private static final String REPOSITORY_XML_PROPERTY_OS = "os";
-  private static final String REPOSITORY_XML_PROPERTY_DESCRIPTION = "description";
+  private static final String REPOSITORY_XML_MAIN_BLOCK_NAME = "os";
+  private static final String REPOSITORY_XML_ATTRIBUTE_OS_TYPE = "type";
+  private static final String REPOSITORY_XML_REPO_BLOCK_NAME = "repo";
+  private static final String REPOSITORY_XML_PROPERTY_BASEURL = "baseurl";
+  private static final String REPOSITORY_XML_PROPERTY_REPOID = "repoid";
+  private static final String REPOSITORY_XML_PROPERTY_REPONAME = "reponame";
+  private static final String REPOSITORY_XML_PROPERTY_MIRRORSLIST = "mirrorslist";
 
   private static final String METAINFO_XML_MAIN_BLOCK_NAME = "metainfo";
   private static final String METAINFO_XML_PROPERTY_VERSION = "version";
   private static final String METAINFO_XML_PROPERTY_USER = "user";
   private static final String METAINFO_XML_PROPERTY_COMMENT = "comment";
+  private static final String METAINFO_XML_PROPERTY_COMPONENT_MAIN = "component";
+  private static final String METAINFO_XML_PROPERTY_COMPONENT_NAME = "name";
+  private static final String METAINFO_XML_PROPERTY_COMPONENT_CATEGORY = "category";
 
   private static final String PROPERTY_XML_MAIN_BLOCK_NAME = "property";
   private static final String PROPERTY_XML_PROPERTY_NAME = "name";
   private static final String PROPERTY_XML_PROPERTY_VALUE = "value";
   private static final String PROPERTY_XML_PROPERTY_DESCRIPTION = "description";
 
+  
+  /**
+   * Ambari Meta Info Object
+   * @param conf Configuration API to be used.
+   * @throws Exception
+   */
+  @Inject
+  public AmbariMetaInfo(Configuration conf) throws Exception {
+    String stackPath = conf.getMetadataPath();
+    this.stackRoot = new File(stackPath);
+  }
+  
+  @Inject
+  public AmbariMetaInfo(File stackRoot) throws Exception {
+    this.stackRoot = stackRoot;
+  }
+
+
+  /**
+   * Initialize the Ambari Meta Info
+   * @throws Exception throws exception if not able to parse the Meta data.
+   */
+  public void init() throws Exception {
+    getConfigurationInformation(stackRoot);
+  }
+
+
+  /**
+   * Get component category
+   * @param stackName
+   * @param version
+   * @param serviceName
+   * @param componentName
+   * @return component component Info
+   */
+  public ComponentInfo getComponentCategory(String stackName, String version,
+      String serviceName, String componentName) {
+    ComponentInfo component = null;
+    List<ComponentInfo> components = getComponentsByService(stackName, version,
+        serviceName);
+    if (components != null)
+      for (ComponentInfo cmp : components) {
+        if (cmp.getName().equals(componentName)) {
+          component = cmp;
+          break;
+        }
+      }
+    return component;
+  }
+
+
+  /**
+   * Get components by service
+   * @param stackName
+   * @param version
+   * @param serviceName
+   * @return
+   */
+  public List<ComponentInfo> getComponentsByService(String stackName,
+      String version, String serviceName) {
+    List<ComponentInfo> componentsResult = null;
+    ServiceInfo service = getServiceInfo(stackName, version, serviceName);
+    if (service != null)
+      componentsResult = service.getComponents();
+
+    return componentsResult;
+  }
+
+  public Map<String, List<RepositoryInfo>> getRepository(String stackName,
+      String version) {
+    Map<String, List<RepositoryInfo>> reposResult = null;
+    StackInfo stack = getStackInfo(stackName, version);
+    if (stack != null) {
+      List<RepositoryInfo> repository = stack.getRepositories();
+      reposResult = new HashMap<String, List<RepositoryInfo>>();
+      for (RepositoryInfo repo : repository) {
+        if (!reposResult.containsKey(repo.getOsType())) {
+          reposResult.put(repo.getOsType(),
+              new ArrayList<RepositoryInfo>());
+        }
+        reposResult.get(repo.getOsType()).add(repo);
+      }
+    }
+    return reposResult;
+  }
+
+  /*
+   * function for given a stack name and version, is it a supported stack
+   */
+  public boolean isSupportedStack(String stackName, String version) {
+    // A stack is supported exactly when its metadata could be resolved.
+    // (Previous logic was inverted: it returned true when getStackInfo()
+    // came back null, i.e. when the stack did NOT exist.)
+    return getStackInfo(stackName, version) != null;
+  }
+
+  /*
+   * support isValidService(), isValidComponent for a given stack/version
+   */
+  public boolean isValidService(String stackName, String version,
+      String serviceName) {
+    ServiceInfo service = getServiceInfo(stackName, version, serviceName);
+    return (service != null);
+  }
+
+  /*
+   * support isValidService(), isValidComponent for a given stack/version
+   */
+  public boolean isValidServiceComponent(String stackName, String version,
+      String serviceName, String componentName) {
+    ServiceInfo service = getServiceInfo(stackName, version, serviceName);
+    if (service == null) {
+      return false;
+    }
+    for (ComponentInfo compInfo: service.getComponents()) {
+      if (compInfo.getName().equals(componentName)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+
+  /**
+   * Get the name of a service given the component name.
+   * @param stackName the stack name
+   * @param version the stack version
+   * @param componentName the component name
+   * @return the service name
+   */
+  public String getComponentToService(String stackName, String version,
+      String componentName) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Looking for service for component"
+          + ", stackName=" + stackName
+          + ", stackVersion=" + version
+          + ", componentName=" + componentName);
+    }
+    Map<String, ServiceInfo> services = getServices(stackName, version);
+    String retService = null;
+    if (services == null
+        || services.isEmpty()) {
+      return retService;
+    }
+    boolean found = false;
+    for (Map.Entry<String, ServiceInfo> entry: services.entrySet()) {
+      for (ComponentInfo compInfo: entry.getValue().getComponents()) {
+        if (compInfo.getName().equals(componentName)) {
+          retService = entry.getKey();
+          found = true;
+          break;
+        }
+      }
+      if (found)
+        break;
+    }
+    return retService;
+  }
 
-  public Map<String, Map<String, String>> getSupportedConfigs(String stackName, String version, String serviceName) {
+  /**
+   * Get the service configs supported for a service in a particular stack
+   * @param stackName the stack name
+   * @param version the version of the stack
+   * @param serviceName the name of the service in the stack
+   * @return the config knobs supported for the service
+   */
+  public Map<String, Map<String, String>> getSupportedConfigs(String stackName,
+      String version, String serviceName) {
     Map<String, Map<String, String>> propertiesResult = new HashMap<String, Map<String, String>>();
 
     ServiceInfo service = getServiceInfo(stackName, version, serviceName);
@@ -83,14 +272,17 @@ public class AmbariMetaInfo {
         List<PropertyInfo> properties = service.getProperties();
         if (properties != null)
           for (PropertyInfo propertyInfo : properties) {
-            Map<String, String> fileProperties = propertiesResult.get(propertyInfo.getFilename());
+            Map<String, String> fileProperties = propertiesResult
+                .get(propertyInfo.getFilename());
             if (fileProperties == null) {
               fileProperties = new HashMap<String, String>();
-              fileProperties.put(propertyInfo.getName(), propertyInfo.getValue());
+              fileProperties.put(propertyInfo.getName(),
+                  propertyInfo.getValue());
               propertiesResult.put(propertyInfo.getFilename(), fileProperties);
 
             } else {
-              fileProperties.put(propertyInfo.getName(), propertyInfo.getValue());
+              fileProperties.put(propertyInfo.getName(),
+                  propertyInfo.getValue());
             }
 
           }
@@ -99,11 +291,36 @@ public class AmbariMetaInfo {
     return propertiesResult;
   }
 
-  public ServiceInfo getServiceInfo(String stackName, String version, String serviceName) {
+  /**
+   * Given a stack name and version return all the services with info
+   * @param stackName the stack name
+   * @param version the version of the stack
+   * @return the information of abt varios services that are supported in the
+   * stack
+   */
+  public Map<String, ServiceInfo> getServices(String stackName, String version) {
+
+    Map<String, ServiceInfo> servicesInfoResult = new HashMap<String, ServiceInfo>();
+
+    List<ServiceInfo> services = null;
+    StackInfo stack = getStackInfo(stackName, version);
+    if (stack == null)
+      return null;
+    services = stack.getServices();
+    if (services != null)
+      for (ServiceInfo service : services) {
+        servicesInfoResult.put(service.getName(), service);
+      }
+    return servicesInfoResult;
+  }
+
+  public ServiceInfo getServiceInfo(String stackName, String version,
+      String serviceName) {
     ServiceInfo serviceInfoResult = null;
     List<ServiceInfo> services = null;
     StackInfo stack = getStackInfo(stackName, version);
-    if (stack == null) return null;
+    if (stack == null)
+      return null;
     services = stack.getServices();
     if (services != null)
       for (ServiceInfo service : services) {
@@ -123,11 +340,16 @@ public class AmbariMetaInfo {
     return servicesResulr;
   }
 
-  private StackInfo getStackInfo(String stackName, String version) {
+  public List<StackInfo> getSupportedStacks() {
+    return stacksResult;
+  }
+
+  public StackInfo getStackInfo(String stackName, String version) {
     StackInfo stackInfoResult = null;
 
     for (StackInfo stack : stacksResult) {
-      if (stackName.equals(stack.getName()) && version.equals(stack.getVersion())) {
+      if (stackName.equals(stack.getName())
+          && version.equals(stack.getVersion())) {
         stackInfoResult = stack;
         break;
       }
@@ -136,75 +358,136 @@ public class AmbariMetaInfo {
   }
 
 
-  List<StackInfo> getSupportedStack() {
-    return stacksResult;
-  }
-
-
-  public AmbariMetaInfo() throws Exception {
-    getConfigurationInformation();
-  }
-
-  private void getConfigurationInformation() throws Exception {
+  private void getConfigurationInformation(File stackRoot) throws Exception {
 
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Loading stack information"
+          + ", stackRoot=" + stackRoot.getPath());
+    }
 
-    File stackRoot = new File(new Configuration().getMetadataPath());//TODO uncomment before commit
-//    File stackRoot = new File("src/main/resources/stacks");
     if (!stackRoot.isDirectory() && !stackRoot.exists())
-      throw new IOException("" + Configuration.METADETA_DIR_PATH + " should be a directory with stack.");
+      throw new IOException("" + Configuration.METADETA_DIR_PATH
+          + " should be a directory with stack.");
     File[] stacks = stackRoot.listFiles();
     for (File stackFolder : stacks) {
-      if (stackFolder.isFile()) continue;
+      if (stackFolder.isFile())
+        continue;
       File[] concretStacks = stackFolder.listFiles();
       for (File stack : concretStacks) {
-        if (stack.isFile()) continue;
+        if (stack.isFile())
+          continue;
         StackInfo stackInfo = new StackInfo();
         stackInfo.setName(stackFolder.getName());
         stackInfo.setVersion(stack.getName());
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Adding new stack to known stacks"
+              + ", stackName=" + stackFolder.getName()
+              + ", stackVersion=" + stack.getName());
+        }
+
         stacksResult.add(stackInfo);
-        //get repository data for current stack of techs
-        File repositoryFolder = new File(stack.getAbsolutePath() + File.separator + REPOSITORY_FOLDER_NAME + File.separator + REPOSITORY_FILE_NAME);
+        // get repository data for current stack of techs
+        File repositoryFolder = new File(stack.getAbsolutePath()
+            + File.separator + REPOSITORY_FOLDER_NAME + File.separator
+            + REPOSITORY_FILE_NAME);
 
         if (repositoryFolder.exists()) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Adding repositories to stack"
+                + ", stackName=" + stackFolder.getName()
+                + ", stackVersion=" + stack.getName()
+                + ", repoFolder=" + repositoryFolder.getPath());
+          }
           List<RepositoryInfo> repositoryInfoList = getRepository(repositoryFolder);
           stackInfo.getRepositories().addAll(repositoryInfoList);
         }
 
-
-        //Get services for this stack
-        File servicesRootFolder = new File(stack.getAbsolutePath() + File.separator + SERVICES_FOLDER_NAME);
+        // Get services for this stack
+        File servicesRootFolder = new File(stack.getAbsolutePath()
+            + File.separator + SERVICES_FOLDER_NAME);
         File[] servicesFolders = servicesRootFolder.listFiles();
 
-        if (servicesFolders != null)
+        if (servicesFolders != null) {
           for (File serviceFolder : servicesFolders) {
-            //Get information about service
+            // Get information about service
             ServiceInfo serviceInfo = new ServiceInfo();
             serviceInfo.setName(serviceFolder.getName());
             stackInfo.getServices().add(serviceInfo);
 
-            //Get metainfo data from metainfo.xml
-            File metainfoFile = new File(serviceFolder.getAbsolutePath() + File.separator + SERVICE_METAINFO_FILE_NAME);
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("Adding new service to stack"
+                  + ", stackName=" + stackFolder.getName()
+                  + ", stackVersion=" + stack.getName()
+                  + ", serviceName=" + serviceInfo.getName());
+            }
+
+            // Get metainfo data from metainfo.xml
+            File metainfoFile = new File(serviceFolder.getAbsolutePath()
+                + File.separator + SERVICE_METAINFO_FILE_NAME);
             if (metainfoFile.exists()) {
               setMetaInfo(metainfoFile, serviceInfo);
-
             }
 
-
-            //Get all properties from all "configs/*-site.xml" files
-            File serviceConfigFolder = new File(serviceFolder.getAbsolutePath() + File.separator + SERVICE_CONFIG_FOLDER_NAME);
+            // Get all properties from all "configs/*-site.xml" files
+            File serviceConfigFolder = new File(serviceFolder.getAbsolutePath()
+                + File.separator + SERVICE_CONFIG_FOLDER_NAME);
+            LOG.info("Listing config files in folder " + serviceConfigFolder);
             File[] configFiles = serviceConfigFolder.listFiles();
-            for (File config : configFiles) {
-              if (config.getName().endsWith(SERVICE_CONFIG_FILE_NAME_POSTFIX)) {
-                serviceInfo.getProperties().addAll(getProperties(config));
+            if (configFiles != null) {
+              for (File config : configFiles) {
+                if (config.getName().endsWith(SERVICE_CONFIG_FILE_NAME_POSTFIX)) {
+                  LOG.info("Reading Metadata Info from config filename " +
+                      config.getAbsolutePath());
+                  serviceInfo.getProperties().addAll(getProperties(config));
+                }
               }
             }
           }
-
+        }
       }
     }
 
   }
 
+  /**
+   *
+   * @param node
+   * @return
+   * @throws TransformerException
+   */
+  private String nodeToString(Node node) throws TransformerException {
+    // Set up the output transformer
+    TransformerFactory transfac = TransformerFactory.newInstance();
+    Transformer trans = transfac.newTransformer();
+    trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
+    trans.setOutputProperty(OutputKeys.INDENT, "yes");
+    StringWriter sw = new StringWriter();
+    StreamResult result = new StreamResult(sw);
+    DOMSource source = new DOMSource(node);
+    trans.transform(source, result);
+    String xmlString = sw.toString();
+    return xmlString;
+  }
+
+  /**
+   * Convert a document to string
+   * @param doc
+   * @return string
+   * @throws TransformerException
+   */
+  private String documentToString(Document doc) throws TransformerException {
+    Transformer transformer = TransformerFactory.newInstance().newTransformer();
+    transformer.setOutputProperty(OutputKeys.INDENT, "yes");
+
+    //initialize StreamResult with File object to save to file
+    StreamResult result = new StreamResult(new StringWriter());
+    DOMSource source = new DOMSource(doc);
+    transformer.transform(source, result);
+
+    String xmlString = result.getWriter().toString();
+    return xmlString;
+  }
 
   private List<RepositoryInfo> getRepository(File repositoryFile) {
 
@@ -214,21 +497,49 @@ public class AmbariMetaInfo {
       DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
       DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
       Document doc = dBuilder.parse(repositoryFile);
-      doc.getDocumentElement().normalize();
 
-      NodeList propertyNodes = doc.getElementsByTagName(REPOSITORY_XML_MAIN_BLOCK_NAME);
+      NodeList osNodes = doc
+          .getElementsByTagName(REPOSITORY_XML_MAIN_BLOCK_NAME);
 
-      for (int index = 0; index < propertyNodes.getLength(); index++) {
+      for (int index = 0; index < osNodes.getLength(); index++) {
+        Node osNode = osNodes.item(index);
 
-        Node node = propertyNodes.item(index);
-        if (node.getNodeType() == Node.ELEMENT_NODE) {
+        if (osNode.getNodeType() == Node.ELEMENT_NODE) {
+          if (!osNode.getNodeName().equals(REPOSITORY_XML_MAIN_BLOCK_NAME)) {
+            continue;
+          }
+          NamedNodeMap attrs = osNode.getAttributes();
+          Node osAttr = attrs.getNamedItem(REPOSITORY_XML_ATTRIBUTE_OS_TYPE);
+          if (osAttr == null) {
+            continue;
+          }
+          String osType = osAttr.getNodeValue();
 
-          Element property = (Element) node;
-          RepositoryInfo repositoryInfo = new RepositoryInfo();
-          repositoryInfo.setUrl(getTagValue(REPOSITORY_XML_PROPERTY_URL, property));
-          repositoryInfo.setOs(getTagValue(REPOSITORY_XML_PROPERTY_OS, property));
-          repositoryInfo.setDescription(getTagValue(REPOSITORY_XML_PROPERTY_DESCRIPTION, property));
-          repositorysInfo.add(repositoryInfo);
+          NodeList repoNodes = osNode.getChildNodes();
+          for (int j = 0; j < repoNodes.getLength(); j++) {
+            Node repoNode = repoNodes.item(j);
+            if (repoNode.getNodeType() != Node.ELEMENT_NODE) {
+              continue;
+            }
+            Element property = (Element) repoNode;
+            RepositoryInfo repositoryInfo = new RepositoryInfo();
+            repositoryInfo.setOsType(osType);
+            repositoryInfo.setRepoId(getTagValue(REPOSITORY_XML_PROPERTY_REPOID,
+                property));
+            repositoryInfo.setRepoName(
+                getTagValue(REPOSITORY_XML_PROPERTY_REPONAME, property));
+
+            repositoryInfo.setBaseUrl(getTagValue(
+                REPOSITORY_XML_PROPERTY_BASEURL, property));
+            repositoryInfo.setMirrorsList(getTagValue(
+                REPOSITORY_XML_PROPERTY_MIRRORSLIST, property));
+
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("Adding repo to stack"
+                  + ", repoInfo=" + repositoryInfo.toString());
+            }
+            repositorysInfo.add(repositoryInfo);
+          }
         }
       }
 
@@ -239,7 +550,6 @@ public class AmbariMetaInfo {
     return repositorysInfo;
   }
 
-
   private void setMetaInfo(File metainfoFile, ServiceInfo serviceInfo) {
 
     DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
@@ -250,16 +560,17 @@ public class AmbariMetaInfo {
       dBuilder = dbFactory.newDocumentBuilder();
       doc = dBuilder.parse(metainfoFile);
     } catch (SAXException e) {
-      log.error("Error while parsing metainf.xml", e);
+      LOG.error("Error while parsing metainf.xml", e);
     } catch (IOException e) {
-      log.error("Error while open metainf.xml", e);
+      LOG.error("Error while open metainf.xml", e);
     } catch (ParserConfigurationException e) {
-      log.error("Error while parsing metainf.xml", e);
+      LOG.error("Error while parsing metainf.xml", e);
     }
 
     doc.getDocumentElement().normalize();
 
-    NodeList metaInfoNodes = doc.getElementsByTagName(METAINFO_XML_MAIN_BLOCK_NAME);
+    NodeList metaInfoNodes = doc
+        .getElementsByTagName(METAINFO_XML_MAIN_BLOCK_NAME);
 
     if (metaInfoNodes.getLength() > 0) {
       Node metaInfoNode = metaInfoNodes.item(0);
@@ -267,14 +578,36 @@ public class AmbariMetaInfo {
 
         Element metaInfoElem = (Element) metaInfoNode;
 
-        serviceInfo.setVersion(getTagValue(METAINFO_XML_PROPERTY_VERSION, metaInfoElem));
-        serviceInfo.setUser(getTagValue(METAINFO_XML_PROPERTY_USER, metaInfoElem));
-        serviceInfo.setComment(getTagValue(METAINFO_XML_PROPERTY_COMMENT, metaInfoElem));
+        serviceInfo.setVersion(getTagValue(METAINFO_XML_PROPERTY_VERSION,
+            metaInfoElem));
+        serviceInfo.setUser(getTagValue(METAINFO_XML_PROPERTY_USER,
+            metaInfoElem));
+        serviceInfo.setComment(getTagValue(METAINFO_XML_PROPERTY_COMMENT,
+            metaInfoElem));
       }
     }
 
-  }
+    NodeList componentInfoNodes = doc
+        .getElementsByTagName(METAINFO_XML_PROPERTY_COMPONENT_MAIN);
+
+    if (componentInfoNodes.getLength() > 0) {
+      for (int index = 0; index < componentInfoNodes.getLength(); index++) {
+        Node componentInfoNode = componentInfoNodes.item(index);
+        if (componentInfoNode.getNodeType() == Node.ELEMENT_NODE) {
+          Element componentInfoElem = (Element) componentInfoNode;
+
+          ComponentInfo componentInfo = new ComponentInfo();
+          componentInfo.setName(getTagValue(
+              METAINFO_XML_PROPERTY_COMPONENT_NAME, componentInfoElem));
+          componentInfo.setCategory(getTagValue(
+              METAINFO_XML_PROPERTY_COMPONENT_CATEGORY, componentInfoElem));
+          serviceInfo.getComponents().add(componentInfo);
 
+        }
+      }
+    }
+
+  }
 
   private List<PropertyInfo> getProperties(File propertyFile) {
 
@@ -285,18 +618,22 @@ public class AmbariMetaInfo {
       Document doc = dBuilder.parse(propertyFile);
       doc.getDocumentElement().normalize();
 
-      NodeList propertyNodes = doc.getElementsByTagName(PROPERTY_XML_MAIN_BLOCK_NAME);
+      NodeList propertyNodes = doc
+          .getElementsByTagName(PROPERTY_XML_MAIN_BLOCK_NAME);
 
       for (int index = 0; index < propertyNodes.getLength(); index++) {
 
         Node node = propertyNodes.item(index);
         if (node.getNodeType() == Node.ELEMENT_NODE) {
-
           Element property = (Element) node;
           PropertyInfo propertyInfo = new PropertyInfo();
-          propertyInfo.setName(getTagValue(PROPERTY_XML_PROPERTY_NAME, property));
-          propertyInfo.setValue(getTagValue(PROPERTY_XML_PROPERTY_VALUE, property));
-          propertyInfo.setDescription(getTagValue(PROPERTY_XML_PROPERTY_DESCRIPTION, property));
+          propertyInfo
+          .setName(getTagValue(PROPERTY_XML_PROPERTY_NAME, property));
+          propertyInfo.setValue(getTagValue(PROPERTY_XML_PROPERTY_VALUE,
+              property));
+
+          propertyInfo.setDescription(getTagValue(
+              PROPERTY_XML_PROPERTY_DESCRIPTION, property));
           propertyInfo.setFilename(propertyFile.getName());
 
           if (propertyInfo.getName() == null || propertyInfo.getValue() == null)
@@ -312,15 +649,17 @@ public class AmbariMetaInfo {
     return resultPropertyList;
   }
 
-
   private String getTagValue(String sTag, Element rawElement) {
     String result = null;
     try {
-      NodeList element = rawElement.getElementsByTagName(sTag).item(0).getChildNodes();
+      NodeList element = rawElement.getElementsByTagName(sTag).item(0)
+          .getChildNodes();
       Node value = (Node) element.item(0);
       result = value.getNodeValue();
-    } catch (NullPointerException e) {
-      log.debug("There is no field like " + sTag + "in this DOM element.", e);
+    } catch (Exception e) {
+//      LOG.info("Error in getting tag value " ,  e);
+      // log.debug("There is no field like " + sTag + "in this DOM element.",
+      // e);
     } finally {
       return result;
     }
@@ -328,5 +667,3 @@ public class AmbariMetaInfo {
   }
 
 }
-
-

+ 116 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaService.java

@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import java.io.IOException;
+import java.util.List;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+import javax.xml.bind.JAXBException;
+
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.codehaus.jackson.JsonGenerationException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.SerializationConfig;
+import org.codehaus.jackson.map.ser.FilterProvider;
+import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
+import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
+
+import com.google.inject.Inject;
+
+@Path("/stacks/")
+public class AmbariMetaService {
+  private static AmbariMetaInfo ambariMetainfo;
+  private static Log LOG = LogFactory.getLog(AmbariMetaService.class);
+
+  @Inject
+  public static void init(AmbariMetaInfo instance) {
+    ambariMetainfo = instance;
+  }
+
+  /**
+   * Filter properties from the service info and others
+   * @param object
+   * @return
+   * @throws IOException 
+   * @throws JsonMappingException 
+   * @throws JsonGenerationException 
+   */
+  public String filterProperties(Object object, boolean ignoreConfigs) throws 
+  JsonGenerationException, JsonMappingException, IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
+    mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, true);
+    if (ignoreConfigs) {
+    FilterProvider filters = new SimpleFilterProvider().addFilter(
+          "propertiesfilter",
+          SimpleBeanPropertyFilter.serializeAllExcept("properties"));
+      mapper.setFilters(filters);
+    } else {
+      FilterProvider filters = new SimpleFilterProvider().addFilter(
+          "propertiesfilter", SimpleBeanPropertyFilter.serializeAllExcept());
+      mapper.setFilters(filters);
+    }
+    String json = mapper.writeValueAsString(object);
+    return json;
+  }
+
+  @GET
+  @Produces("text/plain")
+  public Response getStacks() throws JsonGenerationException, 
+  JsonMappingException, JAXBException, IOException {
+    List<StackInfo> stackInfos = ambariMetainfo.getSupportedStacks();
+    String output = filterProperties(stackInfos, true);
+    return Response.status(Response.Status.OK).entity(output).build();
+  }
+
+  @GET
+  @Path("{stackName}/version/{versionNumber}")
+  @Produces("text/plain")
+  public Response getStack(@PathParam("stackName") String stackName,
+      @PathParam("versionNumber") String versionNumber) throws 
+      JsonGenerationException, JsonMappingException, JAXBException, IOException  {
+    StackInfo stackInfo = ambariMetainfo.getStackInfo(stackName, versionNumber);
+    String output = filterProperties(stackInfo, true);
+    return Response.status(Response.Status.OK).entity(output).build();
+  }
+
+  @GET
+  @Path("{stackName}/version/{versionNumber}/services/{serviceName}")
+  @Produces("text/plain")
+  public Response getServiceInfo(@PathParam("stackName") String stackName,
+      @PathParam("versionNumber") String versionNumber,  
+      @PathParam("serviceName") String serviceName) throws 
+      JsonGenerationException, JsonMappingException, JAXBException, IOException  {
+    ServiceInfo serviceInfo = ambariMetainfo.getServiceInfo(stackName,
+        versionNumber, serviceName);
+    String output = filterProperties(serviceInfo, false);
+    return Response.status(Response.Status.OK).entity(output).build();
+  }
+
+}

+ 5 - 4
ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseService.java

@@ -37,13 +37,12 @@ public abstract class BaseService {
    * This consists of creating a {@link Request} instance, invoking the correct {@link RequestHandler} and
    * applying the proper {@link ResultSerializer} to the result.
    *
-   *
-   *
    * @param headers            http headers
-   * @param body
+   * @param body               http body
    * @param uriInfo            uri information
    * @param requestType        http request type
    * @param resourceDefinition resource definition that is being acted on
+   *
    * @return the response of the operation in serialized form
    */
   protected Response handleRequest(HttpHeaders headers, String body, UriInfo uriInfo, Request.Type requestType,
@@ -52,7 +51,9 @@ public abstract class BaseService {
     Request request = getRequestFactory().createRequest(headers, body, uriInfo, requestType, resourceDefinition);
     Result result = getRequestHandler().handleRequest(request);
 
-    return getResponseFactory().createResponse(request.getResultSerializer().serialize(result, uriInfo));
+    return getResponseFactory().createResponse(requestType,
+        request.getResultSerializer().serialize(result, uriInfo),
+        result.isSynchronous());
   }
 
   /**

+ 14 - 6
ambari-server/src/main/java/org/apache/ambari/server/api/services/ClusterService.java

@@ -64,7 +64,7 @@ public class ClusterService extends BaseService {
   }
 
   /**
-   * Handles: PUT /clusters/{clusterID}
+   * Handles: POST /clusters/{clusterID}
    * Create a specific cluster.
    *
    * @param headers     http headers
@@ -72,17 +72,17 @@ public class ClusterService extends BaseService {
    * @param clusterName cluster id
    * @return information regarding the created cluster
    */
-   @PUT
+   @POST
    @Path("{clusterName}")
    @Produces("text/plain")
    public Response createCluster(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                  @PathParam("clusterName") String clusterName) {
 
-    return handleRequest(headers, body, ui, Request.Type.PUT, createResourceDefinition(clusterName));
+    return handleRequest(headers, body, ui, Request.Type.POST, createResourceDefinition(clusterName));
   }
 
   /**
-   * Handles: POST /clusters/{clusterID}
+   * Handles: PUT /clusters/{clusterID}
    * Update a specific cluster.
    *
    * @param headers     http headers
@@ -90,13 +90,13 @@ public class ClusterService extends BaseService {
    * @param clusterName cluster id
    * @return information regarding the updated cluster
    */
-  @POST
+  @PUT
   @Path("{clusterName}")
   @Produces("text/plain")
   public Response updateCluster(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                 @PathParam("clusterName") String clusterName) {
 
-    return handleRequest(headers, body, ui, Request.Type.POST, createResourceDefinition(clusterName));
+    return handleRequest(headers, body, ui, Request.Type.PUT, createResourceDefinition(clusterName));
   }
 
   /**
@@ -147,6 +147,14 @@ public class ClusterService extends BaseService {
     return new ConfigurationService(cluster);
   }
 
+  /**
+   * Gets the requests sub-resource.
+   */
+  @Path("{clusterName}/requests")
+  public RequestService getRequestHandler(@PathParam("clusterName") String clusterName) {
+    return new RequestService(clusterName);
+  }
+
   /**
    * Create a cluster resource definition.
    *

+ 25 - 7
ambari-server/src/main/java/org/apache/ambari/server/api/services/ComponentService.java

@@ -84,7 +84,7 @@ public class ComponentService extends BaseService {
   }
 
   /**
-   * Handles: PUT /clusters/{clusterID}/services/{serviceID}/components/{componentID}
+   * Handles: POST /clusters/{clusterID}/services/{serviceID}/components/{componentID}
    * Create a specific component.
    *
    * @param body          http body
@@ -94,37 +94,55 @@ public class ComponentService extends BaseService {
    *
    * @return information regarding the created component
    */
-  @PUT
+  @POST
   @Path("{componentName}")
   @Produces("text/plain")
   public Response createComponent(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                 @PathParam("componentName") String componentName) {
 
-    return handleRequest(headers, body, ui, Request.Type.PUT,
+    return handleRequest(headers, body, ui, Request.Type.POST,
         createResourceDefinition(componentName, m_clusterName, m_serviceName));
   }
 
   /**
-   * Handles: POST /clusters/{clusterID}/services/{serviceID}/components/{componentID}
+   * Handles: PUT /clusters/{clusterID}/services/{serviceID}/components/{componentID}
    * Update a specific component.
    *
-   * @param body                http body
+   * @param body          http body
    * @param headers       http headers
    * @param ui            uri info
    * @param componentName component id
    *
    * @return information regarding the updated component
    */
-  @POST
+  @PUT
   @Path("{componentName}")
   @Produces("text/plain")
   public Response updateComponent(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                 @PathParam("componentName") String componentName) {
 
-    return handleRequest(headers, body, ui, Request.Type.POST, createResourceDefinition(
+    return handleRequest(headers, body, ui, Request.Type.PUT, createResourceDefinition(
         componentName, m_clusterName, m_serviceName));
   }
 
+  /**
+   * Handles: PUT /clusters/{clusterID}/services/{serviceID}/components
+   * Update multiple components.
+   *
+   * @param body          http body
+   * @param headers       http headers
+   * @param ui            uri info
+   *
+   * @return information regarding the updated component
+   */
+  @PUT
+  @Produces("text/plain")
+  public Response updateComponents(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+
+    return handleRequest(headers, body, ui, Request.Type.PUT, createResourceDefinition(
+        null, m_clusterName, m_serviceName));
+  }
+
   /**
    * Handles: DELETE /clusters/{clusterID}/services/{serviceID}/components/{componentID}
    * Delete a specific component.

+ 6 - 4
ambari-server/src/main/java/org/apache/ambari/server/api/services/ConfigurationService.java

@@ -19,7 +19,7 @@
 package org.apache.ambari.server.api.services;
 
 import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
+import javax.ws.rs.POST;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.HttpHeaders;
@@ -82,19 +82,21 @@ public class ConfigurationService extends BaseService {
    * @param ui      uri info
    * @return service collection resource representation
    */
-  @PUT
+  @POST
   @Produces("text/plain")
   public Response createConfigurations(String body,@Context HttpHeaders headers, @Context UriInfo ui) {
 
-    return handleRequest(headers, body, ui, Request.Type.PUT,
+    return handleRequest(headers, body, ui, Request.Type.POST,
         createResourceDefinition(null, null, m_clusterName));
   }
 
   /**
    * Create a service resource definition.
    *
-   * @param serviceName host name
+   * @param configType  configuration type
+   * @param configTag   tag applied to the configuration
    * @param clusterName cluster name
+   *
    * @return a service resource definition
    */
   ResourceDefinition createResourceDefinition(String configType, String configTag, String clusterName) {

+ 3 - 3
ambari-server/src/main/java/org/apache/ambari/server/api/services/CreatePersistenceManager.java

@@ -21,6 +21,7 @@ package org.apache.ambari.server.api.services;
 import org.apache.ambari.server.api.resources.ResourceDefinition;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.spi.ClusterController;
+import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.Schema;
 
@@ -31,7 +32,7 @@ import java.util.Map;
  */
 public class CreatePersistenceManager extends BasePersistenceManager {
   @Override
-  public void persist(ResourceDefinition resource) {
+  public RequestStatus persist(ResourceDefinition resource) {
     ClusterController controller = getClusterController();
     Map<Resource.Type, String> mapResourceIds = resource.getResourceIds();
     Resource.Type type = resource.getType();
@@ -41,11 +42,10 @@ public class CreatePersistenceManager extends BasePersistenceManager {
       resource.setProperty(schema.getKeyPropertyId(entry.getKey()), entry.getValue());
     }
     try {
-      controller.createResources(type, createControllerRequest(resource.getProperties()));
+      return controller.createResources(type, createControllerRequest(resource.getProperties()));
     } catch (AmbariException e) {
       //todo: handle exception
       throw new RuntimeException("Create of resource failed: " + e, e);
-
     }
   }
 }

+ 5 - 2
ambari-server/src/main/java/org/apache/ambari/server/api/services/DeletePersistenceManager.java

@@ -20,15 +20,18 @@ package org.apache.ambari.server.api.services;
 
 import org.apache.ambari.server.api.resources.ResourceDefinition;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.spi.RequestStatus;
+
 
 /**
  * Responsible for persisting the deletion of a resource in the back end.
  */
 public class DeletePersistenceManager extends BasePersistenceManager {
   @Override
-  public void persist(ResourceDefinition resource) {
+  public RequestStatus persist(ResourceDefinition resource) {
     try {
-      getClusterController().deleteResources(resource.getType(),
+      //todo: need to account for multiple resources and user predicate
+      return getClusterController().deleteResources(resource.getType(),
           resource.getQuery().getInternalPredicate());
     } catch (AmbariException e) {
       //todo: handle exception

+ 25 - 7
ambari-server/src/main/java/org/apache/ambari/server/api/services/HostComponentService.java

@@ -84,7 +84,7 @@ public class HostComponentService extends BaseService {
   }
 
   /**
-   * Handles PUT /clusters/{clusterID}/hosts/{hostID}/host_components/{hostComponentID}
+   * Handles POST /clusters/{clusterID}/hosts/{hostID}/host_components/{hostComponentID}
    * Create a specific host_component.
    *
    * @param body              http body
@@ -94,18 +94,18 @@ public class HostComponentService extends BaseService {
    *
    * @return host_component resource representation
    */
-  @PUT
+  @POST
   @Path("{hostComponentName}")
   @Produces("text/plain")
   public Response createHostComponent(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                    @PathParam("hostComponentName") String hostComponentName) {
 
-    return handleRequest(headers, body, ui, Request.Type.PUT,
+    return handleRequest(headers, body, ui, Request.Type.POST,
         createResourceDefinition(hostComponentName, m_clusterName, m_hostName));
   }
 
   /**
-   * Handles POST /clusters/{clusterID}/hosts/{hostID}/host_components/{hostComponentID}
+   * Handles PUT /clusters/{clusterID}/hosts/{hostID}/host_components/{hostComponentID}
    * Updates a specific host_component.
    *
    * @param body              http body
@@ -113,18 +113,36 @@ public class HostComponentService extends BaseService {
    * @param ui                uri info
    * @param hostComponentName host_component id
    *
-   * @return host_component resource representation
+   * @return information regarding updated host_component
    */
-  @POST
+  @PUT
   @Path("{hostComponentName}")
   @Produces("text/plain")
   public Response updateHostComponent(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                       @PathParam("hostComponentName") String hostComponentName) {
 
-    return handleRequest(headers, body, ui, Request.Type.POST,
+    return handleRequest(headers, body, ui, Request.Type.PUT,
         createResourceDefinition(hostComponentName, m_clusterName, m_hostName));
   }
 
+  /**
+   * Handles PUT /clusters/{clusterID}/hosts/{hostID}/host_components
+   * Updates multiple host_component resources.
+   *
+   * @param body              http body
+   * @param headers           http headers
+   * @param ui                uri info
+   *
+   * @return information regarding updated host_component resources
+   */
+  @PUT
+  @Produces("text/plain")
+  public Response updateHostComponents(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+
+    return handleRequest(headers, body, ui, Request.Type.PUT,
+        createResourceDefinition(null, m_clusterName, m_hostName));
+  }
+
   /**
    * Handles DELETE /clusters/{clusterID}/hosts/{hostID}/host_components/{hostComponentID}
    * Delete a specific host_component.

+ 45 - 11
ambari-server/src/main/java/org/apache/ambari/server/api/services/HostService.java

@@ -18,21 +18,38 @@
 
 package org.apache.ambari.server.api.services;
 
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
 import org.apache.ambari.server.api.resources.HostResourceDefinition;
 import org.apache.ambari.server.api.resources.ResourceDefinition;
 
-import javax.ws.rs.*;
-import javax.ws.rs.core.*;
-
 /**
  * Service responsible for hosts resource requests.
  */
+@Path("/hosts/")
 public class HostService extends BaseService {
 
   /**
    * Parent cluster id.
    */
   private String m_clusterName;
+  
+  /**
+   * Constructor.
+   */
+  public HostService() {
+    m_clusterName = null;
+  }
 
   /**
    * Constructor.
@@ -77,7 +94,7 @@ public class HostService extends BaseService {
   }
 
   /**
-   * Handles PUT /clusters/{clusterID}/hosts/{hostID}
+   * Handles POST /clusters/{clusterID}/hosts/{hostID}
    * Create a specific host.
    *
    * @param body     http body
@@ -87,19 +104,18 @@ public class HostService extends BaseService {
    *
    * @return host resource representation
    */
-
-  @PUT
+  @POST
   @Path("{hostName}")
   @Produces("text/plain")
   public Response createHost(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                           @PathParam("hostName") String hostName) {
 
-    return handleRequest(headers, body, ui, Request.Type.PUT,
+    return handleRequest(headers, body, ui, Request.Type.POST,
         createResourceDefinition(hostName, m_clusterName));
   }
 
   /**
-   * Handles POST /clusters/{clusterID}/hosts/{hostID}
+   * Handles PUT /clusters/{clusterID}/hosts/{hostID}
    * Updates a specific host.
    *
    * @param body     http body
@@ -107,18 +123,36 @@ public class HostService extends BaseService {
    * @param ui       uri info
    * @param hostName host id
    *
-   * @return host resource representation
+   * @return information regarding updated host
    */
-  @POST
+  @PUT
   @Path("{hostName}")
   @Produces("text/plain")
   public Response updateHost(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                           @PathParam("hostName") String hostName) {
 
-    return handleRequest(headers, body, ui, Request.Type.POST,
+    return handleRequest(headers, body, ui, Request.Type.PUT,
         createResourceDefinition(hostName, m_clusterName));
   }
 
+  /**
+   * Handles PUT /clusters/{clusterID}/hosts
+   * Updates multiple hosts.
+   *
+   * @param body     http body
+   * @param headers  http headers
+   * @param ui       uri info
+   *
+   * @return information regarding the updated hosts
+   */
+  @PUT
+  @Produces("text/plain")
+  public Response updateHosts(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+
+    return handleRequest(headers, body, ui, Request.Type.PUT,
+        createResourceDefinition(null, m_clusterName));
+  }
+
   /**
    * Handles DELETE /clusters/{clusterID}/hosts/{hostID}
    * Deletes a specific host.

+ 49 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueImpl.java

@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.ws.rs.WebApplicationException;
+
+import org.mortbay.jetty.Response;
+
+import com.google.inject.Singleton;
+
+@Singleton
+public class PersistKeyValueImpl {
+  Map<String, String> keyvalues = new HashMap<String, String>();
+  
+  public synchronized String getValue(String key) {
+    if (keyvalues.containsKey(key)) {
+      return keyvalues.get(key);
+    }
+    throw new WebApplicationException(Response.SC_NOT_FOUND);
+  }
+  
+  public synchronized void put(String key, String value) {
+    keyvalues.put(key, value);
+  }
+  
+  public synchronized Map<String, String> getAllKeyValues() {
+    Map<String, String> clone = new HashMap<String, String>(keyvalues);
+    return keyvalues;
+  }
+}

+ 87 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistKeyValueService.java

@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import java.io.IOException;
+import java.util.Map;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.xml.bind.JAXBException;
+
+import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
+import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.codehaus.jackson.JsonGenerationException;
+import org.codehaus.jackson.map.JsonMappingException;
+
+import com.google.inject.Inject;
+
+@Path("/persist/")
+public class PersistKeyValueService {
+  private static PersistKeyValueImpl persistKeyVal;
+  private static Log LOG = LogFactory.getLog(PersistKeyValueService.class);
+
+  @Inject
+  public static void init(PersistKeyValueImpl instance) {
+    persistKeyVal = instance;
+  }
+
+  @POST
+  @Produces("text/plain")
+  public Response update(String keyValues,
+      @Context HttpServletRequest req)
+      throws WebApplicationException, InvalidStateTransitionException,
+      JsonGenerationException, JsonMappingException, JAXBException, IOException {
+    LOG.info("Received message from UI " + keyValues);
+    Map<String, String> keyValuesMap = StageUtils.fromJson(keyValues, Map.class);
+    /* Call into the heartbeat handler */
+
+    for (Map.Entry<String, String> keyValue: keyValuesMap.entrySet()) {
+      persistKeyVal.put(keyValue.getKey(), keyValue.getValue());
+    }
+    return Response.status(Response.Status.ACCEPTED).build();
+  }
+  
+  @GET
+  @Produces("text/plain")
+  @Path("{keyName}")
+  public String getKey( @PathParam("keyName") String keyName) {
+    LOG.info("Looking for keyName " + keyName);
+    return persistKeyVal.getValue(keyName);
+  }
+  
+  @GET
+  @Produces("text/plain")
+  public String getAllKeyValues() throws JsonGenerationException,
+    JsonMappingException, JAXBException, IOException {
+    Map<String, String> ret = persistKeyVal.getAllKeyValues();
+    String stringRet = StageUtils.jaxbToString(ret);
+    LOG.info("Returning " + stringRet);
+    return stringRet;
+  }
+}

+ 4 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/services/PersistenceManager.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.server.api.services;
 
 import org.apache.ambari.server.api.resources.ResourceDefinition;
+import org.apache.ambari.server.controller.spi.RequestStatus;
 
 /**
  * Persistence manager which is responsible for persisting a resource state to the back end.
@@ -30,6 +31,8 @@ public interface PersistenceManager {
    *
    * @param resource  the resource to persist
    *
+   * @return the request state.
+   *
    */
-  public void persist(ResourceDefinition resource);
+  public RequestStatus persist(ResourceDefinition resource);
 }

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/services/Request.java

@@ -37,8 +37,8 @@ public interface Request {
    */
   public enum Type {
     GET,
-    PUT,
     POST,
+    PUT,
     DELETE
   }
 

+ 39 - 15
ambari-server/src/main/java/org/apache/ambari/server/api/services/RequestImpl.java

@@ -62,6 +62,11 @@ public class RequestImpl implements Request {
    */
   private Type m_Type;
 
+  /**
+   * Predicate operators.
+   */
+  private Pattern m_pattern = Pattern.compile("!=|>=|<=|=|>|<");
+
   /**
    * Associated resource definition
    */
@@ -116,19 +121,13 @@ public class RequestImpl implements Request {
 
     if (qsBegin == -1) return null;
 
-    Pattern pattern = Pattern.compile("!=|>=|<=|=|>|<");
-    String qs = uri.substring(qsBegin + 1);
-    String[] tokens = qs.split("&");
+    String[] tokens = uri.substring(qsBegin + 1).split("&");
 
     Set<Predicate> setPredicates = new HashSet<Predicate>();
     for (String outerToken : tokens) {
-      Matcher m = pattern.matcher(outerToken);
-      m.find();
-      String field = outerToken.substring(0, m.start());
-      if (! field.equals("fields")) {
-        int tokEnd = m.end();
-        String value = outerToken.substring(tokEnd);
-        setPredicates.add(createPredicate(field, m.group(), value));
+      if (outerToken != null &&  !outerToken.startsWith("fields")) {
+        setPredicates.add(outerToken.contains("|") ?
+            handleOrPredicate(outerToken) : createPredicate(outerToken));
       }
     }
 
@@ -206,15 +205,17 @@ public class RequestImpl implements Request {
 
   @Override
   public ResultPostProcessor getResultPostProcessor() {
-    return new ResultPostProcessorImpl(this);
+    //todo: Need to reconsider post processor creation and association with a resource type.
+    //todo: mutating operations return request resources which aren't children of all resources.
+    return getRequestType() == Type.GET ? new ResultPostProcessorImpl(this) : new NullPostProcessor();
   }
 
   @Override
   public PersistenceManager getPersistenceManager() {
     switch (getRequestType()) {
-      case PUT:
-        return new CreatePersistenceManager();
       case POST:
+        return new CreatePersistenceManager();
+      case PUT:
         return new UpdatePersistenceManager();
       case DELETE:
         return new DeletePersistenceManager();
@@ -225,8 +226,14 @@ public class RequestImpl implements Request {
     }
   }
 
-  private Predicate createPredicate(String field, String operator, String value) {
-    PropertyId propertyId = PropertyHelper.getPropertyId(field);
+  private Predicate createPredicate(String token) {
+
+    Matcher m = m_pattern.matcher(token);
+    m.find();
+
+    PropertyId propertyId = PropertyHelper.getPropertyId(token.substring(0, m.start()));
+    String     value      = token.substring(m.end());
+    String     operator   = m.group();
 
     if (operator.equals("=")) {
       return new EqualsPredicate(propertyId, value);
@@ -245,7 +252,24 @@ public class RequestImpl implements Request {
     }
   }
 
+  private Predicate handleOrPredicate(String predicate) {
+    Set<Predicate> setPredicates = new HashSet<Predicate>();
+    String[] tokens = predicate.split("\\|");
+    for (String tok : tokens) {
+      setPredicates.add(createPredicate(tok));
+    }
+
+    return new OrPredicate(setPredicates.toArray(new BasePredicate[setPredicates.size()]));
+  }
+
   private  RequestBodyParser getHttpBodyParser() {
     return new JsonPropertyParser();
   }
+
+  private class NullPostProcessor implements ResultPostProcessor {
+    @Override
+    public void process(Result result) {
+      //no-op
+    }
+  }
 }

+ 109 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/RequestService.java

@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+
+import org.apache.ambari.server.api.resources.RequestResourceDefinition;
+import org.apache.ambari.server.api.resources.ResourceDefinition;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+
+/**
+ * Service responsible for request resource requests.
+ */
+public class RequestService extends BaseService {
+  /**
+   * Parent cluster name.
+   */
+  private String m_clusterName;
+
+
+  /**
+   * Constructor.
+   *
+   * @param clusterName cluster id
+   */
+  public RequestService(String clusterName) {
+    m_clusterName = clusterName;
+  }
+
+  /**
+   * Handles URL: /clusters/{clusterID}/requests/{requestID}
+   * Get a specific request.
+   *
+   * @param headers    http headers
+   * @param ui         uri info
+   * @param requestId  request id
+   *
+   * @return request resource representation
+   */
+  @GET
+  @Path("{requestId}")
+  @Produces("text/plain")
+  public Response getRequest(@Context HttpHeaders headers, @Context UriInfo ui,
+                             @PathParam("requestId") String requestId) {
+
+    return handleRequest(headers, null, ui, Request.Type.GET,
+        createResourceDefinition(requestId, m_clusterName));
+  }
+
+  /**
+   * Handles URL: /clusters/{clusterId}/requests
+   * Get all requests for a cluster.
+   *
+   * @param headers http headers
+   * @param ui      uri info
+   *
+   * @return request collection resource representation
+   */
+  @GET
+  @Produces("text/plain")
+  public Response getRequests(@Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, null, ui, Request.Type.GET,
+        createResourceDefinition(null, m_clusterName));
+  }
+
+  /**
+   * Gets the tasks sub-resource.
+   */
+  @Path("{requestId}/tasks")
+  public TaskService getTaskHandler(@PathParam("requestId") String requestId) {
+    return new TaskService(m_clusterName, requestId);
+  }
+
+  /**
+   * Create a request resource definition.
+   *
+   * @param requestId    request id
+   * @param clusterName  cluster name
+   *
+   * @return a request resource definition
+   */
+  ResourceDefinition createResourceDefinition(String requestId, String clusterName) {
+    return new RequestResourceDefinition(requestId, clusterName);
+  }
+}

+ 17 - 3
ambari-server/src/main/java/org/apache/ambari/server/api/services/ResponseFactory.java

@@ -27,11 +27,25 @@ public class ResponseFactory {
   /**
    * Create a response from a provided result.
    *
-   * @param result  the result to wrap
+   * @param requestType  request type
+   * @param result       the result to wrap
+   * @param synchronous  if the request has been handled synchronously
    *
    * @return a new jax-rs Response instance for the provided result
    */
-  public Response createResponse(Object result) {
-    return Response.ok(result).build();
+  public Response createResponse(Request.Type requestType, Object result, boolean synchronous) {
+
+    int status = 200;
+
+    if (synchronous) {
+      if (requestType == Request.Type.POST) {
+        //todo: for now not providing a url for create
+        status = 201;
+      }
+    } else {
+      status = 202;
+    }
+
+    return Response.status(status).entity(result).build();
   }
 }

+ 8 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/Result.java

@@ -32,4 +32,12 @@ public interface Result {
   * @return the results of the request as a Tree structure
    */
   public TreeNode<Resource> getResultTree();
+
+  /**
+   * Determine whether the request was handled synchronously.
+   * If the request is synchronous, all work was completed prior to returning.
+   *
+   * @return true if the request was synchronous, false if it was asynchronous
+   */
+  public boolean isSynchronous();
 }

+ 13 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/ResultImpl.java

@@ -29,15 +29,28 @@ import org.apache.ambari.server.api.util.TreeNodeImpl;
  */
 public class ResultImpl implements Result {
 
+  /**
+   * Whether the request was handled synchronously.
+   */
+  private boolean m_synchronous;
+
   /**
    * Tree structure which holds the results
    */
   private TreeNode<Resource> m_tree = new TreeNodeImpl<Resource>(null, null, null);
 
+  public ResultImpl(boolean synchronous) {
+    m_synchronous = synchronous;
+  }
 
   @Override
   public TreeNode<Resource> getResultTree() {
     return m_tree;
   }
+
+  @Override
+  public boolean isSynchronous() {
+    return m_synchronous;
+  }
 }
 

+ 53 - 8
ambari-server/src/main/java/org/apache/ambari/server/api/services/ServiceService.java

@@ -77,7 +77,7 @@ public class ServiceService extends BaseService {
   }
 
   /**
-   * Handles: PUT /clusters/{clusterId}/services/{serviceId}
+   * Handles: POST /clusters/{clusterId}/services/{serviceId}
    * Create a specific service.
    *
    * @param body        http body
@@ -86,18 +86,35 @@ public class ServiceService extends BaseService {
    * @param serviceName service id
    * @return information regarding the created service
    */
-  @PUT
+  @POST
   @Path("{serviceName}")
   @Produces("text/plain")
   public Response createService(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                 @PathParam("serviceName") String serviceName) {
 
-    return handleRequest(headers, body, ui, Request.Type.PUT,
+    return handleRequest(headers, body, ui, Request.Type.POST,
         createResourceDefinition(serviceName, m_clusterName));
   }
 
   /**
-   * Handles: POST /clusters/{clusterId}/services/{serviceId}
+   * Handles: POST /clusters/{clusterId}/services
+   * Create multiple services.
+   *
+   * @param body        http body
+   * @param headers     http headers
+   * @param ui          uri info
+   * @return information regarding the created services
+   */
+  @POST
+  @Produces("text/plain")
+  public Response createServices(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+
+    return handleRequest(headers, body, ui, Request.Type.POST,
+        createResourceDefinition(null, m_clusterName));
+  }
+
+  /**
+   * Handles: PUT /clusters/{clusterId}/services/{serviceId}
    * Update a specific service.
    *
    * @param body        http body
@@ -106,13 +123,29 @@ public class ServiceService extends BaseService {
    * @param serviceName service id
    * @return information regarding the updated service
    */
-  @POST
+  @PUT
   @Path("{serviceName}")
   @Produces("text/plain")
   public Response updateService(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                 @PathParam("serviceName") String serviceName) {
 
-    return handleRequest(headers, body, ui, Request.Type.POST, createResourceDefinition(serviceName, m_clusterName));
+    return handleRequest(headers, body, ui, Request.Type.PUT, createResourceDefinition(serviceName, m_clusterName));
+  }
+
+  /**
+   * Handles: PUT /clusters/{clusterId}/services
+   * Update multiple services.
+   *
+   * @param body        http body
+   * @param headers     http headers
+   * @param ui          uri info
+   * @return information regarding the updated services
+   */
+  @PUT
+  @Produces("text/plain")
+  public Response updateServices(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
+
+    return handleRequest(headers, body, ui, Request.Type.PUT, createResourceDefinition(null, m_clusterName));
   }
 
   /**
@@ -144,12 +177,24 @@ public class ServiceService extends BaseService {
 
     return new ComponentService(m_clusterName, serviceName);
   }
+  
+  /**
+   * Get the components sub-resource.
+   *
+   * @param serviceName service id
+   * @return the action service
+   */
+  @Path("{serviceName}/actions")
+  public ActionService getActionHandler(@PathParam("serviceName") String serviceName) {
+    return new ActionService(m_clusterName, serviceName);
+  }
 
   /**
    * Create a service resource definition.
    *
-   * @param serviceName host name
-   * @param clusterName cluster name
+   * @param serviceName  service name
+   * @param clusterName  cluster name
+   *
    * @return a service resource definition
    */
   ResourceDefinition createResourceDefinition(String serviceName, String clusterName) {

+ 106 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/TaskService.java

@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import org.apache.ambari.server.api.resources.ResourceDefinition;
+import org.apache.ambari.server.api.resources.TaskResourceDefinition;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * Service responsible for task resource requests.
+ */
+public class TaskService extends BaseService {
+  /**
+   * Parent cluster id.
+   */
+  private String m_clusterName;
+
+  /**
+   * Parent request id.
+   */
+  private String m_requestId;
+
+  /**
+   * Constructor.
+   *
+   * @param clusterName  cluster id
+   * @param requestId    request id
+   */
+  public TaskService(String clusterName, String requestId) {
+    m_clusterName = clusterName;
+    m_requestId = requestId;
+  }
+
+  /**
+   * Handles GET: /clusters/{clusterID}/requests/{requestID}/tasks/{taskID}
+   * Get a specific task.
+   *
+   * @param headers  http headers
+   * @param ui       uri info
+   * @param taskId   task id
+   *
+   * @return a task resource representation
+   */
+  @GET
+  @Path("{taskId}")
+  @Produces("text/plain")
+  public Response getTask(@Context HttpHeaders headers, @Context UriInfo ui,
+                          @PathParam("taskId") String taskId) {
+
+    return handleRequest(headers, null, ui, Request.Type.GET,
+        createResourceDefinition(taskId, m_clusterName, m_requestId));
+  }
+
+  /**
+   * Handles GET: /clusters/{clusterID}/requests/{requestID}/tasks
+   * Get all tasks for a request.
+   *
+   * @param headers http headers
+   * @param ui      uri info
+   *
+   * @return task collection resource representation
+   */
+  @GET
+  @Produces("text/plain")
+  public Response getComponents(@Context HttpHeaders headers, @Context UriInfo ui) {
+    return handleRequest(headers, null, ui, Request.Type.GET,
+        createResourceDefinition(null, m_clusterName, m_requestId));
+  }
+
+  /**
+   * Create a task resource definition.
+   *
+   * @param clusterName  cluster name
+   * @param requestId    request id
+   * @param taskId       task id
+   *
+   * @return a task resource definition
+   */
+  ResourceDefinition createResourceDefinition(String clusterName, String requestId, String taskId) {
+    return new TaskResourceDefinition(clusterName, requestId, taskId);
+  }
+}

+ 4 - 3
ambari-server/src/main/java/org/apache/ambari/server/api/services/UpdatePersistenceManager.java

@@ -21,6 +21,7 @@ package org.apache.ambari.server.api.services;
 
 import org.apache.ambari.server.api.resources.ResourceDefinition;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.spi.RequestStatus;
 
 
 /**
@@ -28,10 +29,10 @@ import org.apache.ambari.server.AmbariException;
  */
 public class UpdatePersistenceManager extends BasePersistenceManager {
   @Override
-  public void persist(ResourceDefinition resource) {
+  public RequestStatus persist(ResourceDefinition resource) {
     try {
-      getClusterController().updateResources(resource.getType(), createControllerRequest(resource.getProperties()),
-          resource.getQuery().getInternalPredicate());
+      return getClusterController().updateResources(resource.getType(), createControllerRequest(
+          resource.getProperties()), resource.getQuery().getInternalPredicate());
     } catch (AmbariException e) {
       //todo: handle exception
       throw new RuntimeException("Update of resource failed: " + e, e);

+ 0 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonPropertyParser.java

@@ -32,7 +32,6 @@ import java.util.Map;
  * JSON parser which parses a JSON string into a map of properties and values.
  */
 public class JsonPropertyParser implements RequestBodyParser {
-  //todo: change value type to String when it is supported in back end
   private Map<PropertyId, String> m_properties = new HashMap<PropertyId, String>();
 
   @Override

+ 6 - 6
ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java

@@ -132,14 +132,14 @@ public class BootStrapImpl {
     }
 
     private String createHostString(List<String> list) {
-      String ret = "";
+      StringBuilder ret = new StringBuilder();
       if (list == null) {
-        return ret;
+        return "";
       }
       for (String host: list) {
-        ret = ret + host + ",";
+        ret.append(host).append(",");
       }
-      return ret;
+      return ret.toString();
     }
 
     /** Create request id dir for each bootstrap call **/
@@ -201,11 +201,11 @@ public class BootStrapImpl {
           StringWriter writer_1 = new StringWriter();
           IOUtils.copy(process.getInputStream(), writer_1);
           String outMesg = writer_1.toString();
-          if (outMesg == null)  outMesg = "";
+          //if (outMesg == null)  outMesg = "";
           StringWriter writer_2 = new StringWriter();
           IOUtils.copy(process.getErrorStream(), writer_2);
           String errMesg = writer_2.toString();
-          if (errMesg == null)  errMesg = "";
+          //if (errMesg == null)  errMesg = "";
           scriptlog = outMesg + "\n" + errMesg;
           if (exitCode != 0) {
             stat = BSStat.ERROR;

+ 3 - 3
ambari-server/src/main/java/org/apache/ambari/server/bootstrap/SshHostInfo.java

@@ -59,13 +59,13 @@ public class SshHostInfo {
   }
 
   public String hostListAsString() {
-    String ret = "";
+    StringBuilder ret = new StringBuilder();
     if (this.hosts == null) {
       return "";
     }
     for (String host : this.hosts) {
-      ret = ret + host + ":";
+      ret.append(host).append(":");
     }
-    return ret;
+    return ret.toString();
   }
 }

+ 21 - 8
ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java

@@ -45,6 +45,7 @@ public class Configuration {
 
   public static final String CONFIG_FILE = "ambari.properties";
   public static final String BOOTSTRAP_DIR = "bootstrap.dir";
+  public static final String API_AUTHENTICATE = "api.authenticate";
   public static final String BOOTSTRAP_SCRIPT = "bootstrap.script";
   public static final String SRVR_KSTR_DIR_KEY = "security.server.keys_dir";
   public static final String SRVR_CRT_NAME_KEY = "security.server.cert_name";
@@ -78,8 +79,11 @@ public class Configuration {
       "authorization.ldap.managerPassword";
   public static final String LDAP_USERNAME_ATTRIBUTE_KEY =
       "authorization.ldap.usernameAttribute";
-  public static final String LDAP_USER_DEFAULT_ROLE_KEY =
-      "authorization.ldap.userDefaultRole";
+
+  public static final String USER_ROLE_NAME_KEY =
+      "authorization.userRoleName";
+  public static final String ADMIN_ROLE_NAME_KEY =
+      "authorization.adminRoleName";
 
   public static final String PERSISTENCE_IN_MEMORY_KEY =
       "server.persistence.inMemory";
@@ -96,7 +100,8 @@ public class Configuration {
 
   private static final String CLIENT_SECURITY_DEFAULT = "local";
 
-  private static final String LDAP_USER_DEFAULT_ROLE_DEFAULT = "user";
+  private static final String USER_ROLE_NAME_DEFAULT = "user";
+  private static final String ADMIN_ROLE_NAME_DEFAULT = "admin";
   private static final String LDAP_BIND_ANONYMOUSLY_DEFAULT = "true";
 
   //TODO For embedded server only - should be removed later
@@ -146,10 +151,10 @@ public class Configuration {
         PASSPHRASE_ENV_KEY, PASSPHRASE_ENV_DEFAULT));
     configsMap.put(PASSPHRASE_KEY, System.getenv(configsMap.get(
         PASSPHRASE_ENV_KEY)));
-    configsMap.put(CLIENT_SECURITY_KEY, properties.getProperty(
-        CLIENT_SECURITY_KEY, CLIENT_SECURITY_DEFAULT));
-    configsMap.put(LDAP_USER_DEFAULT_ROLE_KEY, properties.getProperty(
-        LDAP_USER_DEFAULT_ROLE_KEY, LDAP_USER_DEFAULT_ROLE_DEFAULT));
+    configsMap.put(USER_ROLE_NAME_KEY, properties.getProperty(
+        USER_ROLE_NAME_KEY, USER_ROLE_NAME_DEFAULT));
+    configsMap.put(ADMIN_ROLE_NAME_KEY, properties.getProperty(
+        ADMIN_ROLE_NAME_KEY, ADMIN_ROLE_NAME_DEFAULT));
     configsMap.put(RESOURCES_DIR_KEY, properties.getProperty(
         RESOURCES_DIR_KEY, RESOURCES_DIR_DEFAULT));
     configsMap.put(SRVR_CRT_PASS_LEN_KEY, properties.getProperty(
@@ -253,9 +258,17 @@ public class Configuration {
    */
   public String getMetadataPath() {
     return properties.getProperty(METADETA_DIR_PATH);
-//    return "src/main/resources/stacks";
   }
 
+  /**
+   * Check to see if the API should be authenticated or not
+   * @return true if API authentication is enabled, false otherwise.
+   */
+  public boolean getApiAuthentication() {
+    return ("true".equals(properties.getProperty(API_AUTHENTICATE, "false")));
+  }
+  
+  
   public PersistenceType getPersistenceType() {
     String value = properties.getProperty(PERSISTENCE_IN_MEMORY_KEY, PERSISTENCE_IN_MEMORY_DEFAULT);
     if ("true".equalsIgnoreCase(value)) {

+ 70 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/ActionRequest.java

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller;
+
+import java.util.Map;
+
+public class ActionRequest {
+  private String clusterName; 
+
+  private String serviceName;
+  
+  private String actionName; //for CREATE only
+
+  private Map<String, String> parameters; //for CREATE only
+
+  public ActionRequest(String clusterName, String serviceName,
+      String actionName, Map<String, String> params) {
+    this.clusterName = clusterName;
+    this.serviceName = serviceName;
+    this.actionName = actionName;
+    this.parameters = params;
+  }
+
+  public String getClusterName() {
+    return clusterName;
+  }
+
+  public void setClusterName(String clusterName) {
+    this.clusterName = clusterName;
+  }
+
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  public void setServiceName(String serviceName) {
+    this.serviceName = serviceName;
+  }
+
+  public String getActionName() {
+    return actionName;
+  }
+
+  public void setActionName(String actionName) {
+    this.actionName = actionName;
+  }
+
+  public Map<String, String> getParameters() {
+    return parameters;
+  }
+
+  public void setParameters(Map<String, String> parameters) {
+    this.parameters = parameters;
+  }
+}

+ 51 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/ActionResponse.java

@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller;
+
+
+public class ActionResponse {
+  private String clusterName; 
+
+  private String serviceName;
+  
+  private String actionName;
+
+  public String getClusterName() {
+    return clusterName;
+  }
+
+  public void setClusterName(String clusterName) {
+    this.clusterName = clusterName;
+  }
+
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  public void setServiceName(String serviceName) {
+    this.serviceName = serviceName;
+  }
+
+  public String getActionName() {
+    return actionName;
+  }
+
+  public void setActionName(String actionName) {
+    this.actionName = actionName;
+  }
+}

+ 61 - 52
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java

@@ -45,7 +45,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the service cannot be created
    */
-  public void createServices(Set<ServiceRequest> request)
+  public void createServices(Set<ServiceRequest> requests)
       throws AmbariException;
 
   /**
@@ -55,7 +55,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the component cannot be created
    */
-  public void createComponents(Set<ServiceComponentRequest> request)
+  public void createComponents(Set<ServiceComponentRequest> requests)
       throws AmbariException;
 
   /**
@@ -65,7 +65,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the host cannot be created
    */
-  public void createHosts(Set<HostRequest> request)
+  public void createHosts(Set<HostRequest> requests)
       throws AmbariException;
 
   /**
@@ -76,108 +76,119 @@ public interface AmbariManagementController {
    * @throws AmbariException thrown if the host component cannot be created
    */
   public void createHostComponents(
-      Set<ServiceComponentHostRequest> request) throws AmbariException;
+      Set<ServiceComponentHostRequest> requests) throws AmbariException;
 
   /**
    * Creates a configuration.
    *
    * @param request the request object which defines the configuration.
    *
-   * @return a track action response
-   *
    * @throws AmbariException when the configuration cannot be created.
    */
-  public TrackActionResponse createConfiguration(ConfigurationRequest request) throws AmbariException;
+  public void createConfiguration(ConfigurationRequest request)
+      throws AmbariException;
 
 
   // ----- Read -------------------------------------------------------------
 
   /**
-   * Get the clusters identified by the given request object.
+   * Get the clusters identified by the given request objects.
    *
-   * @param request  the request object which identifies the clusters to be returned
+   * @param requests  the request objects which identify the clusters to be returned
    *
    * @return a set of cluster responses
    *
    * @throws AmbariException thrown if the resource cannot be read
    */
-  public Set<ClusterResponse> getClusters(ClusterRequest request)
-      throws AmbariException;
   public Set<ClusterResponse> getClusters(Set<ClusterRequest> requests)
       throws AmbariException;
 
   /**
-   * Get the services identified by the given request object.
+   * Get the services identified by the given request objects.
    *
-   * @param request  the request object which identifies the services
+   * @param requests  the request objects which identify the services
    * to be returned
    *
    * @return a set of service responses
    *
    * @throws AmbariException thrown if the resource cannot be read
    */
-  public Set<ServiceResponse> getServices(ServiceRequest request)
-      throws AmbariException;
   public Set<ServiceResponse> getServices(Set<ServiceRequest> requests)
       throws AmbariException;
 
   /**
-   * Get the components identified by the given request object.
+   * Get the components identified by the given request objects.
    *
-   * @param request  the request object which identifies the components to be returned
+   * @param requests  the request objects which identify the components to be returned
    *
    * @return a set of component responses
    *
    * @throws AmbariException thrown if the resource cannot be read
    */
-  public Set<ServiceComponentResponse> getComponents(
-      ServiceComponentRequest request) throws AmbariException;
   public Set<ServiceComponentResponse> getComponents(
       Set<ServiceComponentRequest> requests) throws AmbariException;
 
   /**
-   * Get the hosts identified by the given request object.
+   * Get the hosts identified by the given request objects.
    *
-   * @param request  the request object which identifies the hosts to be returned
+   * @param requests  the request objects which identify the hosts to be returned
    *
    * @return a set of host responses
    *
    * @throws AmbariException thrown if the resource cannot be read
    */
-  public Set<HostResponse> getHosts(HostRequest request)
-      throws AmbariException;
   public Set<HostResponse> getHosts(Set<HostRequest> requests)
       throws AmbariException;
 
   /**
-   * Get the host components identified by the given request object.
+   * Get the host components identified by the given request objects.
    *
-   * @param request  the request object which identifies the host components
+   * @param requests  the request objects which identify the host components
    * to be returned
    *
    * @return a set of host component responses
    *
    * @throws AmbariException thrown if the resource cannot be read
    */
-  public Set<ServiceComponentHostResponse> getHostComponents(
-      ServiceComponentHostRequest request) throws AmbariException;
   public Set<ServiceComponentHostResponse> getHostComponents(
       Set<ServiceComponentHostRequest> requests) throws AmbariException;
 
   /**
-   * Gets the configurations identified by the given request object.
+   * Gets the configurations identified by the given request objects.
    *
-   * @param request   the request object
+   * @param requests   the request objects
    *
    * @return  a set of configuration responses
    *
    * @throws AmbariException if the configurations could not be read
    */
-  public Set<ConfigurationResponse> getConfigurations(
-      ConfigurationRequest request) throws AmbariException;
   public Set<ConfigurationResponse> getConfigurations(
       Set<ConfigurationRequest> requests) throws AmbariException;
 
+  /**
+   * Gets the request status identified by the given request object.
+   *
+   * @param request   the request object
+   *
+   * @return  a set of request status responses
+   *
+   * @throws AmbariException if the request status could not be read
+   */
+  public Set<RequestStatusResponse> getRequestStatus(RequestStatusRequest request)
+      throws AmbariException;
+
+  /**
+   * Gets the task status identified by the given request objects.
+   *
+   * @param requests   the request objects
+   *
+   * @return  a set of task status responses
+   *
+   * @throws AmbariException if the task status could not be read
+   */
+  public Set<TaskStatusResponse> getTaskStatus(Set<TaskStatusRequest> requests)
+      throws AmbariException;
+
 
   // ----- Update -----------------------------------------------------------
 
@@ -192,7 +203,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be updated
    */
-  public TrackActionResponse updateCluster(ClusterRequest request)
+  public RequestStatusResponse updateCluster(ClusterRequest request)
       throws AmbariException;
 
   /**
@@ -206,7 +217,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be updated
    */
-  public TrackActionResponse updateServices(Set<ServiceRequest> request)
+  public RequestStatusResponse updateServices(Set<ServiceRequest> requests)
       throws AmbariException;
 
   /**
@@ -220,8 +231,8 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be updated
    */
-  public TrackActionResponse updateComponents(
-      Set<ServiceComponentRequest> request) throws AmbariException;
+  public RequestStatusResponse updateComponents(
+      Set<ServiceComponentRequest> requests) throws AmbariException;
 
   /**
    * Update the host identified by the given request object with the
@@ -232,7 +243,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be updated
    */
-  public void updateHosts(Set<HostRequest> request)
+  public void updateHosts(Set<HostRequest> requests)
       throws AmbariException;
 
   /**
@@ -246,8 +257,8 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be updated
    */
-  public TrackActionResponse updateHostComponents(
-      Set<ServiceComponentHostRequest> request) throws AmbariException;
+  public RequestStatusResponse updateHostComponents(
+      Set<ServiceComponentHostRequest> requests) throws AmbariException;
 
 
   // ----- Delete -----------------------------------------------------------
@@ -257,8 +268,6 @@ public interface AmbariManagementController {
    *
    * @param request  the request object which identifies which cluster to delete
    *
-   * @return a track action response
-   *
    * @throws AmbariException thrown if the resource cannot be deleted
    */
   public void deleteCluster(ClusterRequest request) throws AmbariException;
@@ -266,26 +275,26 @@ public interface AmbariManagementController {
   /**
    * Delete the service identified by the given request object.
    *
-   * @return a track action response
-   *
    * @param requests  the request object which identifies which service to delete
    *
+   * @return the request status response for the delete operation
+   *
    * @throws AmbariException thrown if the resource cannot be deleted
    */
-  public TrackActionResponse deleteServices(Set<ServiceRequest> request)
+  public RequestStatusResponse deleteServices(Set<ServiceRequest> requests)
       throws AmbariException;
 
   /**
    * Delete the component identified by the given request object.
    *
-   * @return a track action response
-   *
    * @param requests  the request object which identifies which component to delete
    *
+   * @return the request status response for the delete operation
+   *
    * @throws AmbariException thrown if the resource cannot be deleted
    */
-  public TrackActionResponse deleteComponents(
-      Set<ServiceComponentRequest> request) throws AmbariException;
+  public RequestStatusResponse deleteComponents(
+      Set<ServiceComponentRequest> requests) throws AmbariException;
 
   /**
    * Delete the host identified by the given request object.
@@ -296,7 +305,7 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be deleted
    */
-  public void deleteHosts(Set<HostRequest> request)
+  public void deleteHosts(Set<HostRequest> requests)
       throws AmbariException;
 
   /**
@@ -308,12 +317,12 @@ public interface AmbariManagementController {
    *
    * @throws AmbariException thrown if the resource cannot be deleted
    */
-  public TrackActionResponse deleteHostComponents(
-      Set<ServiceComponentHostRequest> request) throws AmbariException;
+  public RequestStatusResponse deleteHostComponents(
+      Set<ServiceComponentHostRequest> requests) throws AmbariException;
 
-  public TrackActionResponse createOperations(Set<OperationRequest> request)
+  public RequestStatusResponse createActions(Set<ActionRequest> request)
       throws AmbariException;
 
-  public void getOperations(Set<OperationRequest> request)
+  public Set<ActionResponse> getActions(Set<ActionRequest> request)
       throws AmbariException;
 }

文件差異過大導致無法顯示
+ 463 - 332
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java


+ 62 - 8
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java

@@ -20,17 +20,26 @@ package org.apache.ambari.server.controller;
 
 
 import java.io.File;
-import java.io.IOException;
 import java.util.Map;
 
+import com.google.inject.persist.Transactional;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.agent.HeartBeatHandler;
 import org.apache.ambari.server.agent.rest.AgentResource;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.api.services.AmbariMetaService;
+import org.apache.ambari.server.api.services.PersistKeyValueImpl;
+import org.apache.ambari.server.api.services.PersistKeyValueService;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.PersistenceType;
 import org.apache.ambari.server.resources.ResourceManager;
 import org.apache.ambari.server.resources.api.rest.GetResource;
 import org.apache.ambari.server.security.CertificateManager;
+import org.apache.ambari.server.security.authorization.AmbariLdapAuthenticationProvider;
+import org.apache.ambari.server.security.authorization.AmbariLocalUserDetailsService;
+import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.security.unsecured.rest.CertificateDownload;
 import org.apache.ambari.server.security.unsecured.rest.CertificateSign;
 import org.apache.ambari.server.state.Clusters;
@@ -39,11 +48,13 @@ import org.mortbay.jetty.handler.ResourceHandler;
 import org.mortbay.jetty.security.SslSocketConnector;
 import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.DefaultServlet;
+import org.mortbay.jetty.servlet.FilterHolder;
 import org.mortbay.jetty.servlet.ServletHolder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.context.support.ClassPathXmlApplicationContext;
+import org.springframework.security.crypto.password.PasswordEncoder;
 import org.springframework.web.context.WebApplicationContext;
 import org.springframework.web.context.support.GenericWebApplicationContext;
 import org.springframework.web.filter.DelegatingFilterProxy;
@@ -75,8 +86,11 @@ public class AmbariServer {
   CertificateManager certMan;
   @Inject
   Injector injector;
+  @Inject
+  AmbariMetaInfo ambariMetaInfo;
+
   private static AmbariManagementController clusterController = null;
-  
+
   public static AmbariManagementController getController() {
     return clusterController;
   }
@@ -89,6 +103,7 @@ public class AmbariServer {
 
   public void run() {
     performStaticInjection();
+    addInMemoryUsers();
     server = new Server(CLIENT_API_PORT);
     serverForAgent = new Server();
 
@@ -98,7 +113,14 @@ public class AmbariServer {
       parentSpringAppContext.refresh();
       ConfigurableListableBeanFactory factory = parentSpringAppContext.
           getBeanFactory();
-      factory.registerSingleton("guiceInjector", injector);
+      factory.registerSingleton("guiceInjector",
+          injector);
+      factory.registerSingleton("passwordEncoder",
+          injector.getInstance(PasswordEncoder.class));
+      factory.registerSingleton("ambariLocalUserService",
+          injector.getInstance(AmbariLocalUserDetailsService.class));
+      factory.registerSingleton("ambariLdapAuthenticationProvider",
+          injector.getInstance(AmbariLdapAuthenticationProvider.class));
       //Spring Security xml config depends on this Bean
 
       String[] contextLocations = {SPRING_CONTEXT_LOCATION};
@@ -131,8 +153,10 @@ public class AmbariServer {
       //Spring Security Filter initialization
       DelegatingFilterProxy springSecurityFilter = new DelegatingFilterProxy();
       springSecurityFilter.setTargetBeanName("springSecurityFilterChain");
-      //root.addFilter(new FilterHolder(springSecurityFilter), "/*", 1);
 
+      if (configs.getApiAuthentication()) {
+        root.addFilter(new FilterHolder(springSecurityFilter), "/api/*", 1);
+      }
       //Secured connector for 2-way auth
       SslSocketConnector sslConnectorTwoWay = new SslSocketConnector();
       sslConnectorTwoWay.setPort(CLIENT_TWO_WAY);
@@ -207,9 +231,17 @@ public class AmbariServer {
       serverForAgent.setStopAtShutdown(true);
       springAppContext.start();
 
+      LOG.info("********* Initializing Meta Info **********");
+      ambariMetaInfo.init();
+
       //Start action scheduler
       LOG.info("********* Initializing Clusters **********");
       Clusters clusters = injector.getInstance(Clusters.class);
+      StringBuilder clusterDump = new StringBuilder();
+      clusters.debugDump(clusterDump);
+      LOG.info("********* Current Clusters State *********");
+      LOG.info(clusterDump.toString());
+
       LOG.info("********* Initializing ActionManager **********");
       ActionManager manager = injector.getInstance(ActionManager.class);
       LOG.info("********* Initializing Controller **********");
@@ -217,7 +249,7 @@ public class AmbariServer {
           AmbariManagementController.class);
 
       clusterController = controller;
-      
+
       // FIXME need to figure out correct order of starting things to
       // handle restart-recovery correctly
 
@@ -235,7 +267,7 @@ public class AmbariServer {
 //      RequestInjectorForTest testInjector = new RequestInjectorForTest(controller, clusters);
 //      Thread testInjectorThread = new Thread(testInjector);
 //      testInjectorThread.start();
-      
+
       server.join();
       LOG.info("Joined the Server");
     } catch (Exception e) {
@@ -243,6 +275,26 @@ public class AmbariServer {
     }
   }
 
+  /**
+   * Creates default users and roles if in-memory database is used
+   */
+  @Transactional
+  protected void addInMemoryUsers() {
+    if (configs.getPersistenceType() == PersistenceType.IN_MEMORY) {
+      LOG.info("In-memory database is used - creating default users");
+      Users users = injector.getInstance(Users.class);
+
+      users.createDefaultRoles();
+      users.createUser("admin", "admin");
+      users.createUser("user", "user");
+      try {
+        users.promoteToAdmin(users.getLocalUser("admin"));
+      } catch (AmbariException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
   public void stop() throws Exception {
     try {
       server.stop();
@@ -259,12 +311,14 @@ public class AmbariServer {
     CertificateDownload.init(injector.getInstance(CertificateManager.class));
     CertificateSign.init(injector.getInstance(CertificateManager.class));
     GetResource.init(injector.getInstance(ResourceManager.class));
+    PersistKeyValueService.init(injector.getInstance(PersistKeyValueImpl.class));
+    AmbariMetaService.init(injector.getInstance(AmbariMetaInfo.class));
   }
 
-  public static void main(String[] args) throws IOException {
+  public static void main(String[] args) throws Exception {
 
     Injector injector = Guice.createInjector(new ControllerModule());
-    
+
     try {
       LOG.info("Getting the controller");
       injector.getInstance(GuiceJpaInitializer.class);

+ 12 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java

@@ -28,12 +28,15 @@ public class ClusterResponse {
 
   private final Set<String> hostNames;
 
+  private final String desiredStackVersion;
+
   public ClusterResponse(Long clusterId, String clusterName,
-      Set<String> hostNames) {
+      Set<String> hostNames, String desiredStackVersion) {
     super();
     this.clusterId = clusterId;
     this.clusterName = clusterName;
     this.hostNames = hostNames;
+    this.desiredStackVersion = desiredStackVersion;
   }
 
   /**
@@ -63,6 +66,7 @@ public class ClusterResponse {
     sb.append("{"
         + " clusterName=" + clusterName
         + ", clusterId=" + clusterId
+        + ", desiredStackVersion=" + desiredStackVersion
         + ", hosts=[");
     if (hostNames != null) {
       int i = 0;
@@ -104,4 +108,11 @@ public class ClusterResponse {
     return result;
   }
 
+  /**
+   * @return the desiredStackVersion
+   */
+  public String getDesiredStackVersion() {
+    return desiredStackVersion;
+  }
+
 }

+ 44 - 11
ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java

@@ -20,9 +20,13 @@ package org.apache.ambari.server.controller;
 import com.google.gson.Gson;
 import com.google.inject.Scopes;
 import com.google.inject.assistedinject.FactoryModuleBuilder;
+import com.google.inject.matcher.Matchers;
+import com.google.inject.persist.Transactional;
 import com.google.inject.persist.jpa.JpaPersistModule;
 import org.apache.ambari.server.actionmanager.*;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.dao.ClearEntityManagerInterceptor;
 import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.state.cluster.ClusterFactory;
 import org.apache.ambari.server.state.cluster.ClusterImpl;
@@ -33,6 +37,8 @@ import org.apache.ambari.server.state.host.HostImpl;
 import com.google.inject.AbstractModule;
 import com.google.inject.name.Names;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
+import org.springframework.security.crypto.password.PasswordEncoder;
+import org.springframework.security.crypto.password.StandardPasswordEncoder;
 
 import java.util.Properties;
 
@@ -43,36 +49,63 @@ import java.util.Properties;
 public class ControllerModule extends AbstractModule {
 
   private final Configuration configuration;
+  private final AmbariMetaInfo ambariMetaInfo;
 
-  public ControllerModule() {
+  public ControllerModule() throws Exception {
     configuration = new Configuration();
+    ambariMetaInfo = new AmbariMetaInfo(configuration);
   }
 
-  public ControllerModule(Properties properties) {
+  public ControllerModule(Properties properties) throws Exception {
     configuration = new Configuration(properties);
+    ambariMetaInfo = new AmbariMetaInfo(configuration);
   }
 
   @Override
   protected void configure() {
+    bindInterceptors();
+    installFactories();
+
     bind(Configuration.class).toInstance(configuration);
+    bind(AmbariMetaInfo.class).toInstance(ambariMetaInfo);
+
+    bind(PasswordEncoder.class).toInstance(new StandardPasswordEncoder());
 
     install(new JpaPersistModule(configuration.getPersistenceType().getUnitName()));
 
-    install(new FactoryModuleBuilder().implement(Cluster.class, ClusterImpl.class).build(ClusterFactory.class));
-    install(new FactoryModuleBuilder().implement(Host.class, HostImpl.class).build(HostFactory.class));
-    install(new FactoryModuleBuilder().implement(Service.class, ServiceImpl.class).build(ServiceFactory.class));
-    install(new FactoryModuleBuilder().implement(ServiceComponent.class, ServiceComponentImpl.class).build(ServiceComponentFactory.class));
-    install(new FactoryModuleBuilder().implement(ServiceComponentHost.class, ServiceComponentHostImpl.class).build(ServiceComponentHostFactory.class));
-    install(new FactoryModuleBuilder().implement(Config.class, ConfigImpl.class).build(ConfigFactory.class));
-    install(new FactoryModuleBuilder().build(StageFactory.class));
-    install(new FactoryModuleBuilder().build(HostRoleCommandFactory.class));
 
     bind(Gson.class).in(Scopes.SINGLETON);
     bind(Clusters.class).to(ClustersImpl.class);
     bind(ActionDBAccessor.class).to(ActionDBAccessorImpl.class);
     bindConstant().annotatedWith(Names.named("schedulerSleeptime")).to(10000L);
-    bindConstant().annotatedWith(Names.named("actionTimeout")).to(60000L);
+    bindConstant().annotatedWith(Names.named("actionTimeout")).to(300000L);
     bind(AmbariManagementController.class)
         .to(AmbariManagementControllerImpl.class);
   }
+
+  private void installFactories() {
+    install(new FactoryModuleBuilder().implement(
+        Cluster.class, ClusterImpl.class).build(ClusterFactory.class));
+    install(new FactoryModuleBuilder().implement(
+        Host.class, HostImpl.class).build(HostFactory.class));
+    install(new FactoryModuleBuilder().implement(
+        Service.class, ServiceImpl.class).build(ServiceFactory.class));
+    install(new FactoryModuleBuilder().implement(
+        ServiceComponent.class, ServiceComponentImpl.class).build(
+        ServiceComponentFactory.class));
+    install(new FactoryModuleBuilder().implement(
+        ServiceComponentHost.class, ServiceComponentHostImpl.class).build(
+        ServiceComponentHostFactory.class));
+    install(new FactoryModuleBuilder().implement(
+        Config.class, ConfigImpl.class).build(ConfigFactory.class));
+    install(new FactoryModuleBuilder().build(StageFactory.class));
+    install(new FactoryModuleBuilder().build(HostRoleCommandFactory.class));
+  }
+
+  private void bindInterceptors() {
+    ClearEntityManagerInterceptor clearEntityManagerInterceptor = new ClearEntityManagerInterceptor();
+    requestInjection(clearEntityManagerInterceptor);
+    bindInterceptor(Matchers.any(), Matchers.annotatedWith(Transactional.class), clearEntityManagerInterceptor);
+
+  }
 }

+ 0 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/HostRequest.java

@@ -62,7 +62,6 @@ public class HostRequest {
   }
 
   public String toString() {
-    // FIXME check if clusters and host attrs is non null before printing
     StringBuilder sb = new StringBuilder();
     sb.append("{"
         + ", hostname=" + hostname

+ 37 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/RequestStatusRequest.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller;
+
/**
 * Immutable request object identifying the asynchronous operation whose
 * status is being queried.
 */
public class RequestStatusRequest {

  /** Identifier of the asynchronous request being queried. */
  private final Long requestId;

  /**
   * Create a status request for the given request id.
   *
   * @param requestId id of the request whose status is requested
   */
  public RequestStatusRequest(Long requestId) {
    this.requestId = requestId;
  }

  /**
   * @return the requestId
   */
  public Long getRequestId() {
    return requestId;
  }

}

+ 71 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/RequestStatusResponse.java

@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller;
+
+import java.util.List;
+
+public class RequestStatusResponse {
+
+  // Request ID for tracking async operations
+  private final Long requestId;
+
+  List<ShortTaskStatus> tasks;
+
+  // TODO how are logs to be sent back?
+  private String logs;
+
+  // TODO stage specific information
+
+  public RequestStatusResponse(Long requestId) {
+    super();
+    this.requestId = requestId;
+  }
+
+  /**
+   * @return the logs
+   */
+  public String getLogs() {
+    return logs;
+  }
+
+  /**
+   * @param logs the logs to set
+   */
+  public void setLogs(String logs) {
+    this.logs = logs;
+  }
+
+  /**
+   * @return the requestId
+   */
+  public long getRequestId() {
+    return requestId;
+  }
+
+  public List<ShortTaskStatus> getTasks() {
+    return tasks;
+  }
+
+  public void setTasks(List<ShortTaskStatus> tasks) {
+    this.tasks = tasks;
+  }
+
+
+
+}

部分文件因文件數量過多而無法顯示