Merge branch 'trunk' into branch-dev-patch-upgrade

Nate Cole, 9 years ago
parent commit 73aee31ef6
100 changed files with 2708 additions and 1770 deletions
  1. +1 -0  ambari-admin/src/main/resources/ui/admin-web/app/index.html
  2. +1 -1  ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js
  3. +67 -0  ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js
  4. +5 -1  ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
  5. +6 -0  ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
  6. +2 -2  ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html
  7. +49 -1  ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html
  8. +13 -0  ambari-agent/conf/unix/ambari-agent
  9. +34 -0  ambari-agent/etc/init/ambari-agent.conf
  10. +16 -463  ambari-agent/pom.xml
  11. +166 -2  ambari-agent/src/packages/tarball/all.xml
  12. +46 -0  ambari-common/src/main/python/resource_management/core/global_lock.py
  13. +28 -19  ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py
  14. +14 -9  ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
  15. +28 -0  ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java
  16. +163 -0  ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java
  17. +45 -28  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
  18. +248 -5  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
  19. +9 -0  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
  20. +20 -2  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java
  21. +4 -1  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java
  22. +12 -16  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java
  23. +4 -1  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java
  24. +56 -0  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataKey.java
  25. +187 -0  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
  26. +105 -0  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
  27. +35 -2  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
  28. +46 -13  ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
  29. +10 -1  ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
  30. +7 -1  ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
  31. +7 -2  ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
  32. +14 -0  ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
  33. +11 -5  ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java
  34. +112 -0  ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
  35. +15 -868  ambari-server/pom.xml
  36. +289 -59  ambari-server/src/main/assemblies/server.xml
  37. +5 -0  ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java
  38. +3 -1  ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
  39. +4 -2  ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java
  40. +2 -1  ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
  41. +1 -3  ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
  42. +8 -0  ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
  43. +15 -0  ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java
  44. +14 -0  ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
  45. +11 -4  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
  46. +0 -6  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py
  47. +11 -4  ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py
  48. +1 -1  ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
  49. +1 -1  ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
  50. +2 -2  ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
  51. +0 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
  52. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
  53. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json
  54. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
  55. +8 -1  ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
  56. +5 -0  ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
  57. +4 -76  ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
  58. +2 -2  ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
  59. +26 -5  ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
  60. +11 -15  ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
  61. +13 -2  ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java
  62. +4 -0  ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java
  63. +38 -0  ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java
  64. +47 -0  ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
  65. +4 -2  ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java
  66. +55 -0  ambari-server/src/test/python/TestGlobalLock.py
  67. +3 -3  ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
  68. +79 -38  ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
  69. +0 -4  ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
  70. +11 -0  ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
  71. +34 -0  ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
  72. +12 -2  ambari-server/src/test/python/unitTests.py
  73. +20 -2  ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
  74. +41 -0  ambari-web/app/controllers/main/alert_definitions_controller.js
  75. +17 -13  ambari-web/app/controllers/main/host/combo_search_box.js
  76. +1 -1  ambari-web/app/controllers/main/service/item.js
  77. +1 -1  ambari-web/app/controllers/wizard/step0_controller.js
  78. +2 -1  ambari-web/app/controllers/wizard/step3_controller.js
  79. +1 -1  ambari-web/app/controllers/wizard/step7_controller.js
  80. +7 -1  ambari-web/app/mappers/alert_definition_summary_mapper.js
  81. +2 -0  ambari-web/app/messages.js
  82. +2 -1  ambari-web/app/mixins/main/service/configs/config_overridable.js
  83. +1 -0  ambari-web/app/models/alerts/alert_definition.js
  84. +2 -1  ambari-web/app/models/host_component.js
  85. +43 -0  ambari-web/app/models/service.js
  86. +4 -4  ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs
  87. +4 -28  ambari-web/app/templates/main/alerts.hbs
  88. +31 -0  ambari-web/app/templates/main/alerts/alert_definition/alert_definition_state.hbs
  89. +28 -0  ambari-web/app/templates/main/alerts/alert_definition/alert_definition_summary.hbs
  90. +19 -0  ambari-web/app/utils/ajax/ajax.js
  91. +14 -0  ambari-web/app/utils/ember_reopen.js
  92. +1 -1  ambari-web/app/utils/validator.js
  93. +2 -0  ambari-web/app/views.js
  94. +3 -0  ambari-web/app/views/common/quick_view_link_view.js
  95. +16 -3  ambari-web/app/views/common/sort_view.js
  96. +18 -6  ambari-web/app/views/common/table_view.js
  97. +2 -21  ambari-web/app/views/main/alert_definitions_view.js
  98. +34 -0  ambari-web/app/views/main/alerts/alert_definition/alert_definition_state.js
  99. +65 -0  ambari-web/app/views/main/alerts/alert_definition/alert_definition_summary.js
  100. +12 -0  ambari-web/app/views/main/dashboard/config_history_view.js

+ 1 - 0
ambari-admin/src/main/resources/ui/admin-web/app/index.html

@@ -129,6 +129,7 @@
     <script src="scripts/controllers/NavbarCtrl.js"></script>
     <script src="scripts/controllers/NavbarCtrl.js"></script>
     <script src="scripts/controllers/authentication/AuthenticationMainCtrl.js"></script>
     <script src="scripts/controllers/authentication/AuthenticationMainCtrl.js"></script>
     <script src="scripts/controllers/loginActivities/LoginActivitiesMainCtrl.js"></script>
     <script src="scripts/controllers/loginActivities/LoginActivitiesMainCtrl.js"></script>
+    <script src="scripts/controllers/loginActivities/LoginMessageMainCtrl.js"></script>
     <script src="scripts/controllers/users/UsersCreateCtrl.js"></script>
     <script src="scripts/controllers/users/UsersCreateCtrl.js"></script>
     <script src="scripts/controllers/users/UsersListCtrl.js"></script>
     <script src="scripts/controllers/users/UsersListCtrl.js"></script>
     <script src="scripts/controllers/users/UsersShowCtrl.js"></script>
     <script src="scripts/controllers/users/UsersShowCtrl.js"></script>

+ 1 - 1
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/UserAccessListCtrl.js

@@ -22,7 +22,7 @@ angular.module('ambariAdminConsole')
 function($scope, $location, Cluster, $modal, $rootScope, $routeParams, PermissionSaver, Alert, $translate) {
   var $t = $translate.instant;
   $scope.constants = {
-    users: $t('common.users').toLowerCase()
+    usersGroups: $t('common.usersGroups').toLowerCase()
   };
   $scope.users = [];
   $scope.usersPerPage = 10;

+ 67 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/loginActivities/LoginMessageMainCtrl.js

@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+'use strict';
+
+angular.module('ambariAdminConsole')
+  .controller('LoginMessageMainCtrl',['$scope', 'Alert', '$timeout', '$http', '$translate', function($scope, Alert, $timeout, $http, $translate) {
+    var $t = $translate.instant;
+    $scope.status = false;
+    $scope.motdExists = false;
+    $scope.text = "";
+    $scope.submitDisabled = true;
+
+    $http.get('/api/v1/admin-settings/motd').then(function (res) {
+      var response = JSON.parse(res.data.AdminSettings.content);
+      $scope.text = response.text ? response.text : "";
+      $scope.status = response.status == "true";
+      $scope.motdExists = true;
+    });
+
+    $scope.inputChangeEvent = function(){
+      $scope.submitDisabled = false;
+    };
+    $scope.changeStatus = function(){
+      $scope.status = !$scope.status;
+      $scope.submitDisabled = false;
+    };
+
+    $scope.saveLoginMsg = function(form) {
+      var method = $scope.motdExists ? 'PUT' : 'POST';
+      var data = {
+        'AdminSettings' : {
+          'content' : '{"text":"' + $scope.text + '", "status":"' + $scope.status + '"}',
+          'name' : 'motd',
+          'setting_type' : 'ambari-server'
+        }
+      };
+      form.submitted = true;
+      if (form.$valid){
+        $scope.submitDisabled = true;
+        $http({
+          method: method,
+          url: '/api/v1/admin-settings/' + ($scope.motdExists ? 'motd' : ''),
+          data: data
+        }).then(function successCallback() {
+          $scope.motdExists = true;
+        }, function errorCallback(data) {
+          $scope.submitDisabled = false;
+          Alert.error($t('common.loginActivities.saveError'), data.data.message);
+        });
+      }
+    };
+  }]);
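For context, the controller above drives Ambari's admin-settings REST resource: a GET reads the current "motd" entry, then saveLoginMsg() POSTs a new entry or PUTs an update depending on motdExists. The sketch below reproduces the same round trip outside the UI; the server URL and credentials are illustrative assumptions, and the Python requests library merely stands in for Angular's $http:

    import json
    import requests  # assumption: any HTTP client would do

    AMBARI = "http://localhost:8080"        # assumed local Ambari server
    AUTH = ("admin", "admin")               # assumed default admin credentials
    HEADERS = {"X-Requested-By": "ambari"}  # Ambari rejects write requests lacking this header

    # The content field is itself a JSON-encoded string, exactly as the
    # controller assembles it in saveLoginMsg().
    payload = {
        "AdminSettings": {
            "content": json.dumps({"text": "Authorized users only", "status": "true"}),
            "name": "motd",
            "setting_type": "ambari-server",
        }
    }

    # POST creates the setting the first time; a PUT to .../admin-settings/motd
    # updates it, mirroring the motdExists check in the controller.
    resp = requests.post(AMBARI + "/api/v1/admin-settings",
                         json=payload, auth=AUTH, headers=HEADERS)
    resp.raise_for_status()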

+ 5 - 1
ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js

@@ -74,6 +74,7 @@ angular.module('ambariAdminConsole')
       'jwt': 'JWT',
       'warning': 'Warning',
       'filterInfo': '{{showed}} of {{total}} {{term}} showing',
+      'usersGroups': 'Users/Groups',
 
       'clusterNameChangeConfirmation': {
         'title': 'Confirm Cluster Name Change',
@@ -83,7 +84,10 @@ angular.module('ambariAdminConsole')
       'loginActivities': {
         'loginActivities':'Login Activities',
         'loginMessage': 'Login Message',
-        'homeDirectory': 'Home Directory'
+        'loginMessage.placeholder': 'Please enter login message',
+        'homeDirectory': 'Home Directory',
+        'onlySimpleChars': 'Must contain only simple characters.',
+        'saveError': 'Save error'
       },
 
       'controls': {

+ 6 - 0
ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css

@@ -682,6 +682,12 @@ table.no-border tr td{
   min-height: 63px;
 }
 
+.login-message-pane .active, .inactive {font-size:30px;cursor:pointer;float: left;margin-left: 17px;}
+.login-message-pane i.active {color: #5cb85c;margin-top: 3px;}
+.login-message-pane i.inactive {color: #d9534f;margin-top: 2px;}
+.login-message-pane .well {height: 74px;}
+.login-message-pane input {margin-left: 3px;}
+
 .views-permissions-panel .panel-body{
   padding-bottom: 0;
 }

+ 2 - 2
ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/userAccessList.html

@@ -73,11 +73,11 @@
     </tbody>
   </table>
   <div class="alert alert-info col-sm-12" ng-show="!users.length">
-    {{'common.alerts.nothingToDisplay' | translate: '{term: constants.users}'}}
+    {{'common.alerts.nothingToDisplay' | translate: '{term: constants.usersGroups}'}}
   </div>
   <div class="col-sm-12 table-bar">
     <div class="pull-left filtered-info">
-      <span>{{'common.filterInfo' | translate: '{showed: tableInfo.showed, total: tableInfo.total, term: constants.users}'}}</span>
+      <span>{{'common.filterInfo' | translate: '{showed: tableInfo.showed, total: tableInfo.total, term: constants.usersGroups}'}}</span>
       <span ng-show="isNotEmptyFilter">- <a href ng-click="clearFilters()">{{'common.controls.clearFilters' | translate}}</a></span>
     </div>
     <div class="pull-right left-margin">

+ 49 - 1
ambari-admin/src/main/resources/ui/admin-web/app/views/loginActivities/loginMessage.html

@@ -16,4 +16,52 @@
 * limitations under the License.
 -->
 
-Login Message
+<br/>
+<div class="login-message-pane" ng-controller="LoginMessageMainCtrl">
+
+  <form class="form-horizontal" novalidate name="form" autocomplete="off">
+    <div class="well">
+      <div class="form-group" ng-class="{'has-error' : (form.login_text.$error.pattern) && form.submitted}">
+        <i class="fa fa-toggle-on active"
+           ng-if="status == true"
+           ng-click="changeStatus();">
+        </i>
+        <i class="fa fa-toggle-on fa-rotate-180 inactive"
+           ng-if="status == false"
+           ng-click="changeStatus();">
+        </i>
+        <div class="col-sm-11">
+          <input type="text"
+                 ng-disabled="!status"
+                 class="form-control"
+                 name="login_text"
+                 placeholder="{{'common.loginActivities.loginMessage.placeholder' | translate}}"
+                 ng-model="text"
+                 ng-change="inputChangeEvent()"
+                 ng-pattern="/^([a-zA-Z0-9._\s]+)$/"
+                 autocomplete="off">
+
+          <div class="alert alert-danger top-margin" ng-show="form.login_text.$error.pattern && form.submitted">
+            {{'common.loginActivities.onlySimpleChars' | translate}}
+          </div>
+        </div>
+      </div>
+    </div>
+    <div class="form-group">
+      <div class="col-sm-offset-2 col-sm-10">
+        <button
+          class="btn btn-primary groupcreate-btn pull-right left-margin"
+          ng-disabled="submitDisabled"
+          ng-click="saveLoginMsg(form)">
+          {{'common.controls.save' | translate}}
+        </button>
+      </div>
+    </div>
+  </form>
+</div>
+
+
+
+
+
+

+ 13 - 0
ambari-agent/conf/unix/ambari-agent

@@ -157,7 +157,14 @@ case "$1" in
         change_files_permissions
         
         echo "Starting ambari-agent"
+        
+        if [ "$AMBARI_AGENT_RUN_IN_FOREGROUND" == true ] ; then
+          $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1 
+          exit $?
+        fi
+        
         nohup $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1 &
+        
         sleep 2
         PID=$!
         echo "Verifying $AMBARI_AGENT process status..."
@@ -219,6 +226,12 @@
             echo "Stopping $AMBARI_AGENT"
             change_files_permissions
             $PYTHON $AGENT_SCRIPT stop
+            
+            status ambari-agent 2>/dev/null | grep start 1>/dev/null
+            if [ "$?" -eq 0 ] ; then
+              echo "Stopping $AMBARI_AGENT upstart job"
+              stop ambari-agent > /dev/null
+            fi
           fi
           echo "Removing PID file at $PIDFILE"
           ambari-sudo.sh rm -f $PIDFILE

+ 34 - 0
ambari-agent/etc/init/ambari-agent.conf

@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#ambari-agent
+description     "ambari agent"
+
+stop on runlevel [06]
+
+env PIDFILE=/var/run/ambari-agent/ambari-agent.pid
+
+kill signal SIGKILL
+respawn
+
+script
+   . /etc/environment
+
+   export AMBARI_AGENT_RUN_IN_FOREGROUND=true
+   exec /etc/init.d/ambari-agent start
+end script
+
+post-stop script
+  rm -f $PIDFILE
+end script

+ 16 - 463
ambari-agent/pom.xml

@@ -50,10 +50,10 @@
     <ambari.server.module>../ambari-server</ambari.server.module>
     <target.cache.dir>${project.build.directory}/cache/</target.cache.dir>
     <resource.keeper.script>${ambari.server.module}/src/main/python/ambari_server/resourceFilesKeeper.py</resource.keeper.script>
-    <init.d.dir>/etc/rc.d/init.d</init.d.dir>
     <resourceManagementSrcLocation>${project.basedir}/../ambari-common/src/main/python/resource_management</resourceManagementSrcLocation>
     <resourcesFolder>${ambari.server.module}/src/main/resources</resourcesFolder>
     <customActionsLocation>${target.cache.dir}/custom_actions</customActionsLocation>
+    <empty.dir>src/packages/tarball</empty.dir> <!-- any directory in project with not very big amount of files (not to waste-load them) -->
   </properties>
   <build>
     <plugins>
@@ -136,26 +136,6 @@
               <goal>exec</goal>
             </goals>
           </execution>
-          <execution>
-            <!-- TODO: Looks like section is unused, maybe remove? -->
-            <configuration>
-              <executable>${executable.python}</executable>
-              <workingDirectory>target${dirsep}ambari-agent-${project.version}</workingDirectory>
-              <arguments>
-                <argument>${project.basedir}${dirsep}src${dirsep}main${dirsep}python${dirsep}setup.py</argument>
-                <argument>clean</argument>
-                <argument>bdist_dumb</argument>
-              </arguments>
-              <environmentVariables>
-                <PYTHONPATH>target${dirsep}ambari-agent-${project.version}${pathsep}$PYTHONPATH</PYTHONPATH>
-              </environmentVariables>
-            </configuration>
-            <id>python-package</id>
-            <phase>package</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-          </execution>
           <execution>
             <configuration>
               <executable>${executable.python}</executable>
@@ -179,7 +159,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>rpm-maven-plugin</artifactId>
-        <version>2.0.1</version>
+        <version>2.1.4</version>
         <executions>
           <execution>
             <!-- unbinds rpm creation from maven lifecycle -->
@@ -220,255 +200,43 @@
           <autoRequires>false</autoRequires>
           <mappings>
             <mapping>
-              <directory>${agent.install.dir}</directory>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}${dirsep}ambari_agent</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>../ambari-common/src/main/unix/ambari-python-wrap</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-sudo.sh</location>
-                </source>
-              </sources>
-            </mapping>         
-            <mapping>
-              <directory>${ambari_commons.install.dir}</directory>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_commons</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${resource_management.install.dir}</directory>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${resourceManagementSrcLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${jinja.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</location>
-                  <excludes>
-                    <exclude>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</exclude>
-                  </excludes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${simplejson.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson/</location>
-                </source>
-              </sources>
-            </mapping>
-             <mapping>
-              <directory>${lib.dir}/examples</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>src/examples</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/ambari-agent/conf</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-agent.ini</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/ambari-agent/conf</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/logging.conf.sample</location>
-                </source>
-              </sources>
-            </mapping>              
-            <mapping>
-              <directory>/usr/sbin</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-agent</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent</directory>
-              <configuration>true</configuration>
-              <filemode>700</filemode>
+              <directory>/etc</directory>
               <username>root</username>
               <groupname>root</groupname>
+              <directoryIncluded>false</directoryIncluded> <!-- avoid managing /etc/init.d -->
               <sources>
                 <source>
-                  <location>conf/unix/ambari-env.sh</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}/etc</location>
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent</directory>
-              <filemode>700</filemode>
+			<mapping>
+              <directory>/usr</directory>
               <username>root</username>
               <groupname>root</groupname>
               <sources>
                 <source>
-                  <location>conf/unix/install-helper.sh</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}/usr</location>
                 </source>
               </sources>
             </mapping>
             <mapping>
-              <directory>/var/lib/ambari-agent</directory>
-              <filemode>700</filemode>
+              <directory>/var</directory>
               <username>root</username>
               <groupname>root</groupname>
               <sources>
                 <source>
-                  <location>conf/unix/upgrade_agent_configs.py</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}/var</location>
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>${package.pid.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/data</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/tmp</directory>
-              <filemode>777</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/keys</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>${package.log.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>${init.d.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>etc/init.d/ambari-agent</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/${project.artifactId}/data</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>../version</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/cache</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${target.cache.dir}</location>
-                  <excludes>
-                    <exclude>custom_actions/scripts/*</exclude>
-                  </excludes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/cache/custom_actions</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${customActionsLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/lib</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
           </mappings>
           </mappings>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.vafer</groupId>
         <artifactId>jdeb</artifactId>
-        <version>1.0.1</version>
+        <version>1.4</version>
         <executions>
           <execution>
             <phase>none</phase>
         <configuration>
         <configuration>
           <controlDir>${basedir}/src/main/package/deb/control</controlDir>
           <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}.deb</deb>
+          <skip>false</skip>
+          <skipPOMs>false</skipPOMs>
           <dataSet>
             <data>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${agent.install.dir}</prefix>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</src>
-              <excludes>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</excludes>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${jinja.install.dir}</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</src>
-              <type>directory</type>
+              <src>${project.build.directory}${dirsep}${project.artifactId}-${project.version}.tar.gz</src>
+              <type>archive</type>
               <mapper>
                 <type>perm</type>
-                <prefix>${simplejson.install.dir}</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/examples</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${lib.dir}/examples</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-agent.ini</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-agent/conf</prefix>
-                  <user>root</user>
-                  <group>root</group>
-                  <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/logging.conf.sample</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-agent/conf</prefix>
-                  <user>root</user>
-                  <group>root</group>
-                  <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/src/ambari-agent</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-env.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/install-helper.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/upgrade_agent_configs.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <type>template</type>
-              <paths>
-                <path>${package.pid.dir}</path>
-                <path>/var/lib/${project.artifactId}/data</path>
-                <path>/var/lib/${project.artifactId}/tmp</path>
-                <path>/var/lib/${project.artifactId}/keys</path>
-                <path>${package.log.dir}</path>
-                <path>/var/lib/${project.artifactId}/lib</path>
-              </paths>
-              <mapper>
-                <type>perm</type>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>etc/init.d/ambari-agent</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/init.d</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/src/version</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/${project.artifactId}/data</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>../ambari-common/src/main/unix/ambari-python-wrap</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-sudo.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${target.cache.dir}</src>
-              <type>directory</type>
-              <excludes>custom_actions/scripts/*</excludes>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent/cache</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${customActionsLocation}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-agent/cache/custom_actions</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>
-                ${project.basedir}/../ambari-common/src/main/python/ambari_commons
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${ambari_commons.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>
-                ${resourceManagementSrcLocation}
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${resource_management.install.dir}</prefix>
-                <filemode>755</filemode>
                 <user>root</user>
                 <group>root</group>
               </mapper>
@@ -751,7 +310,7 @@
           </execution>
            <execution>
             <id>copy-resources-filter</id>
-            <phase>prepare-package</phase>
+            <phase>generate-resources</phase>
             <goals>
               <goal>copy-resources</goal>
             </goals>
@@ -940,12 +499,6 @@
         </plugins>
       </build>
     </profile>
-    <profile>
-      <id>suse11</id>
-      <properties>
-        <init.d.dir>/etc/init.d</init.d.dir>
-      </properties>
-    </profile>
     <profile>
       <id>pluggable-stack-definition</id>
       <activation>

+ 166 - 2
ambari-agent/src/packages/tarball/all.xml

@@ -23,12 +23,176 @@
   -->
   <formats>
     <format>dir</format>
+    <format>tar.gz</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
+  <!-- File sets. Syntax:
+	  <fileSets>
+	    <fileSet>
+	      <useDefaultExcludes/>
+	      <outputDirectory/>
+	      <includes/>
+	      <excludes/>
+	      <fileMode/>
+	      <directoryMode/>
+	      <directory/>
+	      <lineEnding/>
+	      <filtered/>
+	    </fileSet>
+	  </fileSets>
+  -->
   <fileSets>
     <fileSet>
-      <directory>src/main/python</directory>
-      <outputDirectory>/</outputDirectory>
+      <directoryMode>755</directoryMode>
+      <directory>src/main/python/ambari_agent</directory>
+      <outputDirectory>${agent.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_commons</directory>
+      <outputDirectory>${ambari_commons.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${resourceManagementSrcLocation}</directory>
+      <outputDirectory>${resource_management.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</directory>
+      <outputDirectory>${jinja.install.dir}</outputDirectory>
+      <excludes>
+      	<exclude>**/testsuite/**</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</directory>
+      <outputDirectory>${simplejson.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>src/examples</directory>
+      <outputDirectory>${lib.dir}/examples</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>${package.pid.dir}</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>777</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/${project.artifactId}/tmp</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/${project.artifactId}/keys</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>${package.log.dir}</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-agent/lib</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${target.cache.dir}</directory>
+      <outputDirectory>/var/lib/ambari-agent/cache</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${pluggableStackDefinitionOutput}/custom_actions</directory>
+      <outputDirectory>/var/lib/ambari-agent/cache/custom_actions</outputDirectory>
     </fileSet>
   </fileSets>
+  <!-- Single files. Syntax:
+	  <files>
+	    <file>
+	      <source/>
+	      <outputDirectory/>
+	      <destName/>
+	      <fileMode/>
+	      <lineEnding/>
+	      <filtered/>
+	    </file>
+	  </files>
+  -->
+  <files>
+    <file>
+      <fileMode>755</fileMode>
+      <source>../ambari-common/src/main/unix/ambari-python-wrap</source>
+      <outputDirectory>/var/lib/ambari-agent/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>conf/unix/ambari-sudo.sh</source>
+      <outputDirectory>/var/lib/ambari-agent/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>conf/unix/ambari-agent.ini</source>
+      <outputDirectory>/etc/ambari-agent/conf</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>conf/unix/logging.conf.sample</source>
+      <outputDirectory>/etc/ambari-agent/conf</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>conf/unix/ambari-agent</source>
+      <outputDirectory>/usr/sbin</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/ambari-env.sh</source>
+      <outputDirectory>/var/lib/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/install-helper.sh</source>
+      <outputDirectory>/var/lib/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/upgrade_agent_configs.py</source>
+      <outputDirectory>/var/lib/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>etc/init/ambari-agent.conf</source>
+      <outputDirectory>/etc/init</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>etc/init.d/ambari-agent</source>
+      <outputDirectory>/etc/init.d/ambari-agent</outputDirectory>
+    </file>
+    <file>
+      <fileMode>644</fileMode>
+      <source>${basedir}/target/src/version</source>
+      <outputDirectory>/var/lib/${project.artifactId}/data</outputDirectory>
+    </file>
+  </files>
 </assembly>

+ 46 - 0
ambari-common/src/main/python/resource_management/core/global_lock.py

@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import threading
+from resource_management.core.exceptions import Fail
+
+# concurrent kinit's can cause the following error:
+# Internal credentials cache error while storing credentials while getting initial credentials
+LOCK_TYPE_KERBEROS = "KERBEROS_LOCK"
+
+# dictionary of all global lock instances
+__GLOBAL_LOCKS = {
+  LOCK_TYPE_KERBEROS : threading.RLock()
+}
+
+def get_lock(lock_type):
+  """
+  Gets the global lock associated with the specified type. This does not actually acquire
+  the lock, it simply returns the RLock instance. It is up to the caller to invoke RLock.acquire()
+  and RLock.release() correctly.
+  :param lock_type:
+  :return: a global threading.RLock() instance
+  :rtype: threading.RLock()
+  """
+  if lock_type not in __GLOBAL_LOCKS:
+    raise Fail("There is no global lock associated with {0}".format(str(lock_type)))
+
+  return __GLOBAL_LOCKS[lock_type]
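Note that get_lock() only returns the shared RLock; it never acquires it, so callers bracket the critical section themselves. A minimal usage sketch under that contract (the body of the critical section is left as a placeholder):

    from resource_management.core import global_lock

    # Fetch the process-wide Kerberos lock; concurrent kinit's against the
    # same credentials cache can corrupt it, as the module comment explains.
    kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)

    kinit_lock.acquire()
    try:
        pass  # run kinit / touch the shared credentials cache here
    finally:
        kinit_lock.release()

Because threading.RLock is reentrant and also a context manager, "with kinit_lock:" would be equivalent to the explicit acquire/release pair; the curl_krb_request() change below uses the explicit form.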

+ 28 - 19
ambari-common/src/main/python/resource_management/libraries/functions/curl_krb_request.py

@@ -24,13 +24,15 @@ __all__ = ["curl_krb_request"]
 import logging
 import os
 import time
-import subprocess
+import threading
 
+from resource_management.core import global_lock
 from resource_management.core import shell
 from resource_management.core.exceptions import Fail
 from get_kinit_path import get_kinit_path
 from get_klist_path import get_klist_path
 from resource_management.libraries.functions.get_user_call_output import get_user_call_output
 from resource_management.libraries.functions.get_user_call_output import get_user_call_output
 from resource_management.libraries.functions.get_user_call_output import get_user_call_output
+
 # hashlib is supplied as of Python 2.5 as the replacement interface for md5
 # hashlib is supplied as of Python 2.5 as the replacement interface for md5
 # and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
 # and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
 # available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
 # available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
@@ -47,7 +49,6 @@ MAX_TIMEOUT_DEFAULT = CONNECTION_TIMEOUT_DEFAULT + 2
 
 logger = logging.getLogger()
 
-
 def curl_krb_request(tmp_dir, keytab, principal, url, cache_file_prefix,
     krb_exec_search_paths, return_only_http_code, alert_name, user,
     connection_timeout = CONNECTION_TIMEOUT_DEFAULT):
@@ -62,25 +63,33 @@ def curl_krb_request(tmp_dir, keytab, principal, url, cache_file_prefix,
   ccache_file_path = "{0}{1}{2}_{3}_cc_{4}".format(tmp_dir, os.sep, cache_file_prefix, user, ccache_file_name)
   ccache_file_path = "{0}{1}{2}_{3}_cc_{4}".format(tmp_dir, os.sep, cache_file_prefix, user, ccache_file_name)
   kerberos_env = {'KRB5CCNAME': ccache_file_path}
   kerberos_env = {'KRB5CCNAME': ccache_file_path}
 
 
-  # If there are no tickets in the cache or they are expired, perform a kinit, else use what
-  # is in the cache
-  if krb_exec_search_paths:
-    klist_path_local = get_klist_path(krb_exec_search_paths)
-  else:
-    klist_path_local = get_klist_path()
-
-  if shell.call("{0} -s {1}".format(klist_path_local, ccache_file_path), user=user)[0] != 0:
+  # concurrent kinit's can cause the following error:
+  # Internal credentials cache error while storing credentials while getting initial credentials
+  kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+  kinit_lock.acquire()
+  try:
+    # If there are no tickets in the cache or they are expired, perform a kinit, else use what
+    # is in the cache
     if krb_exec_search_paths:
     if krb_exec_search_paths:
-      kinit_path_local = get_kinit_path(krb_exec_search_paths)
+      klist_path_local = get_klist_path(krb_exec_search_paths)
     else:
     else:
-      kinit_path_local = get_kinit_path()
-    logger.debug("[Alert][{0}] Enabling Kerberos authentication via GSSAPI using ccache at {1}.".format(
-      alert_name, ccache_file_path))
-
-    shell.checked_call("{0} -l 5m -c {1} -kt {2} {3} > /dev/null".format(kinit_path_local, ccache_file_path, keytab, principal), user=user)
-  else:
-    logger.debug("[Alert][{0}] Kerberos authentication via GSSAPI already enabled using ccache at {1}.".format(
-      alert_name, ccache_file_path))
+      klist_path_local = get_klist_path()
+
+    if shell.call("{0} -s {1}".format(klist_path_local, ccache_file_path), user=user)[0] != 0:
+      if krb_exec_search_paths:
+        kinit_path_local = get_kinit_path(krb_exec_search_paths)
+      else:
+        kinit_path_local = get_kinit_path()
+
+      logger.debug("[Alert][{0}] Enabling Kerberos authentication via GSSAPI using ccache at {1}.".format(
+        alert_name, ccache_file_path))
+
+      shell.checked_call("{0} -l 5m -c {1} -kt {2} {3} > /dev/null".format(kinit_path_local, ccache_file_path, keytab, principal), user=user)
+    else:
+      logger.debug("[Alert][{0}] Kerberos authentication via GSSAPI already enabled using ccache at {1}.".format(
+        alert_name, ccache_file_path))
+  finally:
+    kinit_lock.release()
 
 
   # check if cookies dir exists, if not then create it
   # check if cookies dir exists, if not then create it
   cookies_dir = os.path.join(tmp_dir, "cookies")
   cookies_dir = os.path.join(tmp_dir, "cookies")

+ 14 - 9
ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py

@@ -18,9 +18,7 @@ See the License for the specific language governing permissions and
limitations under the License.
'''

-import socket
-
-from resource_management.core.exceptions import Fail
+from resource_management.core import global_lock
from resource_management.core.resources import Execute
from resource_management.libraries.functions import format

@@ -55,15 +53,22 @@ def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd
  if ssl and ssl_keystore is not None and ssl_password is not None:
    beeline_url.extend(['ssl={ssl_str}', 'sslTrustStore={ssl_keystore}', 'trustStorePassword={ssl_password!p}'])

-  # append url according to kerberos setting
+  # append url according to principal and execute kinit
  if kinitcmd:
    beeline_url.append('principal={key}')
-    Execute(kinitcmd, user=smokeuser)
+
+    # prevent concurrent kinit
+    kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    kinit_lock.acquire()
+    try:
+      Execute(kinitcmd, user=smokeuser)
+    finally:
+      kinit_lock.release()

  cmd = "! beeline -u '%s' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL'" % \
        format(";".join(beeline_url))
+
  Execute(cmd,
-          user=smokeuser,
-          path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
-          timeout=check_command_timeout
-  )
+    user=smokeuser,
+    path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
+    timeout=check_command_timeout)

+ 28 - 0
ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/MetadataException.java

@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+/**
+ * Marker for checked exceptions thrown from the metadata management layer.
+ */
+public class MetadataException extends Exception {
+  // Message-only constructor
+  public MetadataException(String message) {
+    super(message);
+  }
+}

+ 163 - 0
ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/TimelineMetricMetadata.java

@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "metric_metadata")
+@XmlAccessorType(XmlAccessType.NONE)
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public class TimelineMetricMetadata {
+  private String metricName;
+  private String appId;
+  private String units;
+  private MetricType type = MetricType.UNDEFINED;
+  private Long seriesStartTime;
+  boolean supportsAggregates = true;
+  // Helper flag, ignored during serialization
+  boolean isPersisted = false;
+
+  public enum MetricType {
+    GAUGE, // Can vary in both directions
+    COUNTER, // Single dimension
+    UNDEFINED // Default
+  }
+
+  // Default constructor
+  public TimelineMetricMetadata() {
+  }
+
+  public TimelineMetricMetadata(String metricName, String appId, String units,
+                                MetricType type, Long seriesStartTime,
+                                boolean supportsAggregates) {
+    this.metricName = metricName;
+    this.appId = appId;
+    this.units = units;
+    this.type = type;
+    this.seriesStartTime = seriesStartTime;
+    this.supportsAggregates = supportsAggregates;
+  }
+
+  @XmlElement(name = "metricname")
+  public String getMetricName() {
+    return metricName;
+  }
+
+  public void setMetricName(String metricName) {
+    this.metricName = metricName;
+  }
+
+  // This is the key for the web service, hence ignored in serialization.
+  //@XmlElement(name = "appid")
+  public String getAppId() {
+    return appId;
+  }
+
+  public void setAppId(String appId) {
+    this.appId = appId;
+  }
+
+  @XmlElement(name = "units")
+  public String getUnits() {
+    return units;
+  }
+
+  public void setUnits(String units) {
+    this.units = units;
+  }
+
+  @XmlElement(name = "type")
+  public MetricType getType() {
+    return type;
+  }
+
+  public void setType(MetricType type) {
+    this.type = type;
+  }
+
+  @XmlElement(name = "seriesStartTime")
+  public Long getSeriesStartTime() {
+    return seriesStartTime;
+  }
+
+  public void setSeriesStartTime(Long seriesStartTime) {
+    this.seriesStartTime = seriesStartTime;
+  }
+
+  @XmlElement(name = "supportsAggregation")
+  public boolean isSupportsAggregates() {
+    return supportsAggregates;
+  }
+
+  public void setSupportsAggregates(boolean supportsAggregates) {
+    this.supportsAggregates = supportsAggregates;
+  }
+
+  @JsonIgnore
+  public boolean isPersisted() {
+    return isPersisted;
+  }
+
+  public void setIsPersisted(boolean isPersisted) {
+    this.isPersisted = isPersisted;
+  }
+
+  /**
+   * Determines whether this metadata needs to be written back to the store.
+   * Assumes the key (metricName, appId) of the object being compared is the
+   * same as this @TimelineMetricMetadata.
+   * @param metadata the @TimelineMetricMetadata to compare against
+   */
+  public boolean needsToBeSynced(TimelineMetricMetadata metadata) throws MetadataException {
+    if (!this.metricName.equals(metadata.getMetricName()) ||
+        !this.appId.equals(metadata.getAppId())) {
+      throw new MetadataException("Unexpected argument: metricName = " +
+        metadata.getMetricName() + ", appId = " + metadata.getAppId());
+    }
+
+    // Series start time should never change
+    return (this.units != null && !this.units.equals(metadata.getUnits())) ||
+      (this.type != null && !this.type.equals(metadata.getType())) ||
+      //!this.lastRecordedTime.equals(metadata.getLastRecordedTime()) || // TODO: support
+      !this.supportsAggregates == metadata.isSupportsAggregates();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    TimelineMetricMetadata that = (TimelineMetricMetadata) o;
+
+    if (!metricName.equals(that.metricName)) return false;
+    return !(appId != null ? !appId.equals(that.appId) : that.appId != null);
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = metricName.hashCode();
+    result = 31 * result + (appId != null ? appId.hashCode() : 0);
+    return result;
+  }
+}
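needsToBeSynced() is the change detector for the metadata cache: it refuses a comparison across different (metricName, appId) keys and reports true only when a mutable attribute (units, type, supportsAggregates) differs. A hedged sketch of the put-if-modified pattern this enables (the cache map and method name are illustrative, not the committed manager code):

    // Illustrative cache update; TimelineMetricMetadataKey comes from this same commit.
    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> cache = new ConcurrentHashMap<>();

    void putIfModified(TimelineMetricMetadata incoming) throws MetadataException {
      TimelineMetricMetadataKey key =
        new TimelineMetricMetadataKey(incoming.getMetricName(), incoming.getAppId());
      TimelineMetricMetadata cached = cache.get(key);
      // New metric, or an attribute changed since the last write
      if (cached == null || cached.needsToBeSynced(incoming)) {
        incoming.setIsPersisted(false); // flag for a background writer to upsert
        cache.put(key, incoming);
      }
    }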

+ 45 - 28
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java

@@ -23,12 +23,15 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.sink.timeline.Precision;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;

@@ -39,15 +42,10 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.TreeMap;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
-import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.USE_GROUPBY_AGGREGATOR_QUERIES;
@@ -58,8 +56,8 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
  private final TimelineMetricConfiguration configuration;
  private PhoenixHBaseAccessor hBaseAccessor;
  private static volatile boolean isInitialized = false;
-  private final ScheduledExecutorService executorService =
-    Executors.newSingleThreadScheduledExecutor();
+  private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
+  private TimelineMetricMetadataManager metricMetadataManager;

  /**
   * Construct the service.
@@ -81,6 +79,9 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
    if (!isInitialized) {
      hBaseAccessor = new PhoenixHBaseAccessor(hbaseConf, metricsConf);
      hBaseAccessor.initMetricSchema();
+      // Initialize metadata from store
+      metricMetadataManager = new TimelineMetricMetadataManager(hBaseAccessor, metricsConf);
+      metricMetadataManager.initializeMetadata();

      if (Boolean.parseBoolean(metricsConf.get(USE_GROUPBY_AGGREGATOR_QUERIES, "true"))) {
        LOG.info("Using group by aggregators for aggregating host and cluster metrics.");
@@ -88,7 +89,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin

      // Start the cluster aggregator second
      TimelineMetricAggregator secondClusterAggregator =
-        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hBaseAccessor, metricsConf);
+        TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hBaseAccessor, metricsConf, metricMetadataManager);
      if (!secondClusterAggregator.isDisabled()) {
        Thread aggregatorThread = new Thread(secondClusterAggregator);
        aggregatorThread.start();
@@ -188,8 +189,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
    TimelineMetrics metrics;

    if (hostnames == null || hostnames.isEmpty()) {
-      metrics = hBaseAccessor.getAggregateMetricRecords(condition,
-          metricFunctions);
+      metrics = hBaseAccessor.getAggregateMetricRecords(condition, metricFunctions);
    } else {
      metrics = hBaseAccessor.getMetricRecords(condition, metricFunctions);
    }
@@ -199,7 +199,7 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
  private TimelineMetrics postProcessMetrics(TimelineMetrics metrics) {
    List<TimelineMetric> metricsList = metrics.getMetrics();

-    for (TimelineMetric metric: metricsList){
+    for (TimelineMetric metric : metricsList){
      String name = metric.getMetricName();
      if (name.contains("._rate")){
        updateValueAsRate(metric.getMetricValues());
@@ -250,22 +250,17 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
        // fallback to VALUE, and fullMetricName
      }

-      addFunctionToMetricName(metricsFunctions, cleanMetricName, function);
+      List<Function> functionsList = metricsFunctions.get(cleanMetricName);
+      if (functionsList == null) {
+        functionsList = new ArrayList<Function>(1);
+      }
+      functionsList.add(function);
+      metricsFunctions.put(cleanMetricName, functionsList);
    }

    return metricsFunctions;
  }

-  private static void addFunctionToMetricName(
-    HashMap<String, List<Function>> metricsFunctions, String cleanMetricName,
-    Function function) {
-
-    List<Function> functionsList = metricsFunctions.get(cleanMetricName);
-    if (functionsList==null) functionsList = new ArrayList<Function>(1);
-    functionsList.add(function);
-    metricsFunctions.put(cleanMetricName, functionsList);
-  }
-
  @Override
  public TimelineMetric getTimelineMetric(String metricName, List<String> hostnames,
      String applicationId, String instanceId, Long startTime,
@@ -314,16 +309,38 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
    return metric;
  }

-
  @Override
-  public TimelinePutResponse putMetrics(TimelineMetrics metrics)
-    throws SQLException, IOException {
-
+  public TimelinePutResponse putMetrics(TimelineMetrics metrics) throws SQLException, IOException {
    // Error indicated by the Sql exception
    TimelinePutResponse response = new TimelinePutResponse();

-    hBaseAccessor.insertMetricRecords(metrics);
+    hBaseAccessor.insertMetricRecordsWithMetadata(metricMetadataManager, metrics);

    return response;
  }
+
+  @Override
+  public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata() throws SQLException, IOException {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+      metricMetadataManager.getMetadataCache();
+
+    // Group Metadata by AppId
+    Map<String, List<TimelineMetricMetadata>> metadataByAppId = new HashMap<>();
+    for (TimelineMetricMetadata metricMetadata : metadata.values()) {
+      List<TimelineMetricMetadata> metadataList = metadataByAppId.get(metricMetadata.getAppId());
+      if (metadataList == null) {
+        metadataList = new ArrayList<>();
+        metadataByAppId.put(metricMetadata.getAppId(), metadataList);
+      }
+
+      metadataList.add(metricMetadata);
+    }
+
+    return metadataByAppId;
+  }
+
+  @Override
+  public Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException {
+    return metricMetadataManager.getHostedAppsCache();
+  }
}

+ 248 - 5
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java

@@ -17,6 +17,7 @@
 */
package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;

+import com.google.common.base.Enums;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.util.RetryCounterFactory;
import org.apache.hadoop.metrics2.sink.timeline.Precision;
import org.apache.hadoop.metrics2.sink.timeline.SingleValuedTimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.AggregatorUtils;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.Function;
@@ -34,6 +36,8 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.MetricHostAggregate;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineClusterMetric;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricReadHelper;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.ConnectionProvider;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultPhoenixDataSource;
@@ -51,12 +55,18 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.*;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATE_TABLE_SPLIT_POINTS;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATORS_SKIP_BLOCK_CACHE;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_DAILY_TABLE_TTL;
@@ -75,12 +85,16 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.PRECISION_TABLE_SPLIT_POINTS;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.PRECISION_TABLE_TTL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.ALTER_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_HOSTED_APPS_METADATA_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_GROUPED_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_METADATA_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.DEFAULT_ENCODING;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.GET_HOSTED_APPS_METADATA_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.GET_METRIC_METADATA_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_DAILY_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
@@ -92,6 +106,8 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_AGGREGATE_RECORD_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_TIME_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_HOSTED_APPS_METADATA_SQL;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METADATA_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METRICS_SQL;

/**
@@ -260,6 +276,14 @@ public class PhoenixHBaseAccessor {
      conn = getConnectionRetryingOnException();
      stmt = conn.createStatement();

+      // Metadata
+      String metadataSql = String.format(CREATE_METRICS_METADATA_TABLE_SQL,
+        encoding, compression);
+      stmt.executeUpdate(metadataSql);
+      String hostedAppSql = String.format(CREATE_HOSTED_APPS_METADATA_TABLE_SQL,
+        encoding, compression);
+      stmt.executeUpdate(hostedAppSql);
+
      // Host level
      String precisionSql = String.format(CREATE_METRICS_TABLE_SQL,
        encoding, precisionTtl, compression);
@@ -371,8 +395,8 @@ public class PhoenixHBaseAccessor {
    return "";
  }

-  public void insertMetricRecords(TimelineMetrics metrics) throws SQLException, IOException {
-
+  public void insertMetricRecordsWithMetadata(TimelineMetricMetadataManager metadataManager,
+                                              TimelineMetrics metrics) throws SQLException, IOException {
    List<TimelineMetric> timelineMetrics = metrics.getMetrics();
    if (timelineMetrics == null || timelineMetrics.isEmpty()) {
      LOG.debug("Empty metrics insert request.");
@@ -422,8 +446,16 @@ public class PhoenixHBaseAccessor {

        try {
          metricRecordStmt.executeUpdate();
+
+          // Write to metadata cache on successful write to store
+          metadataManager.putIfModifiedTimelineMetricMetadata(
+            metadataManager.getTimelineMetricMetadata(metric));
+
+          metadataManager.putIfModifiedHostedAppsMetadata(
+            metric.getHostName(), metric.getAppId());
+
        } catch (SQLException sql) {
-          LOG.error(sql);
+          LOG.error("Failed on insert records to store.", sql);
        }
      }

@@ -448,6 +480,10 @@ public class PhoenixHBaseAccessor {
    }
  }

+  public void insertMetricRecords(TimelineMetrics metrics) throws SQLException, IOException {
+    insertMetricRecordsWithMetadata(null, metrics);
+  }
+
  @SuppressWarnings("unchecked")
  public TimelineMetrics getMetricRecords(
    final Condition condition, Map<String, List<Function>> metricFunctions)
@@ -566,8 +602,7 @@ public class PhoenixHBaseAccessor {
      }
    }
    else {
-      TimelineMetric metric;
-      metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);
+      TimelineMetric metric = TIMELINE_METRIC_READ_HELPER.getTimelineMetricFromResultSet(rs);

      if (condition.isGrouped()) {
        metrics.addOrMergeTimelineMetric(metric);
@@ -1032,4 +1067,212 @@ public class PhoenixHBaseAccessor {
  public boolean isSkipBlockCacheForAggregatorsEnabled() {
    return skipBlockCacheForAggregatorsEnabled;
  }
+
+  /**
+   * One-time save of hosted apps metadata when topology is discovered during aggregation.
+   * @throws SQLException
+   */
+  public void saveHostAppsMetadata(Map<String, Set<String>> hostedApps) throws SQLException {
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+    try {
+      stmt = conn.prepareStatement(UPSERT_HOSTED_APPS_METADATA_SQL);
+      int rowCount = 0;
+
+      for (Map.Entry<String, Set<String>> hostedAppsEntry : hostedApps.entrySet()) {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("HostedAppsMetadata: " + hostedAppsEntry);
+        }
+
+        stmt.clearParameters();
+        stmt.setString(1, hostedAppsEntry.getKey());
+        stmt.setString(2, StringUtils.join(hostedAppsEntry.getValue(), ","));
+        try {
+          stmt.executeUpdate();
+          rowCount++;
+        } catch (SQLException sql) {
+          LOG.error("Error saving hosted apps metadata.", sql);
+        }
+      }
+
+      conn.commit();
+      LOG.info("Saved " + rowCount + " hosted apps metadata records.");
+
+    } finally {
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+  }
+
+  /**
+   * Save metadata on updates.
+   * @param metricMetadata the @Collection of @TimelineMetricMetadata to persist
+   * @throws SQLException
+   */
+  public void saveMetricMetadata(Collection<TimelineMetricMetadata> metricMetadata) throws SQLException {
+    if (metricMetadata.isEmpty()) {
+      LOG.info("No metadata records to save.");
+      return;
+    }
+
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+
+    try {
+      stmt = conn.prepareStatement(UPSERT_METADATA_SQL);
+      int rowCount = 0;
+
+      for (TimelineMetricMetadata metadata : metricMetadata) {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("TimelineMetricMetadata: metricName = " + metadata.getMetricName()
+            + ", appId = " + metadata.getAppId()
+            + ", seriesStartTime = " + metadata.getSeriesStartTime()
+          );
+        }
+
+        stmt.clearParameters();
+        stmt.setString(1, metadata.getMetricName());
+        stmt.setString(2, metadata.getAppId());
+        stmt.setString(3, metadata.getUnits());
+        stmt.setString(4, metadata.getType().name());
+        stmt.setLong(5, metadata.getSeriesStartTime());
+        stmt.setBoolean(6, metadata.isSupportsAggregates());
+
+        try {
+          stmt.executeUpdate();
+          rowCount++;
+        } catch (SQLException sql) {
+          LOG.error("Error saving metadata.", sql);
+        }
+      }
+
+      conn.commit();
+      LOG.info("Saved " + rowCount + " metadata records.");
+
+    } finally {
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+  }
+
+  public Map<String, Set<String>> getHostedAppsMetadata() throws SQLException {
+    Map<String, Set<String>> hostedAppMap = new HashMap<>();
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+    ResultSet rs = null;
+
+    try {
+      stmt = conn.prepareStatement(GET_HOSTED_APPS_METADATA_SQL);
+      rs = stmt.executeQuery();
+
+      while (rs.next()) {
+        hostedAppMap.put(rs.getString("HOSTNAME"),
+          new HashSet<>(Arrays.asList(StringUtils.split(rs.getString("APP_IDS"), ","))));
+      }
+
+    } finally {
+      if (rs != null) {
+        try {
+          rs.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+
+    return hostedAppMap;
+  }
+
+  // No filter criteria support for now.
+  public Map<TimelineMetricMetadataKey, TimelineMetricMetadata> getTimelineMetricMetadata() throws SQLException {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadataMap = new HashMap<>();
+    Connection conn = getConnection();
+    PreparedStatement stmt = null;
+    ResultSet rs = null;
+
+    try {
+      stmt = conn.prepareStatement(GET_METRIC_METADATA_SQL);
+      rs = stmt.executeQuery();
+
+      while (rs.next()) {
+        String metricName = rs.getString("METRIC_NAME");
+        String appId = rs.getString("APP_ID");
+        TimelineMetricMetadata metadata = new TimelineMetricMetadata(
+          metricName,
+          appId,
+          rs.getString("UNITS"),
+          Enums.getIfPresent(MetricType.class, rs.getString("TYPE")).or(MetricType.UNDEFINED),
+          rs.getLong("START_TIME"),
+          rs.getBoolean("SUPPORTS_AGGREGATION")
+        );
+
+        TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(metricName, appId);
+        metadata.setIsPersisted(true); // Always true on retrieval
+        metadataMap.put(key, metadata);
+      }
+
+    } finally {
+      if (rs != null) {
+        try {
+          rs.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException e) {
+          // Ignore
+        }
+      }
+      if (conn != null) {
+        try {
+          conn.close();
+        } catch (SQLException sql) {
+          // Ignore
+        }
+      }
+    }
+
+    return metadataMap;
+  }
}
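Every accessor method above releases its ResultSet, Statement, and Connection in a hand-rolled finally block. On Java 7+ the equivalent cleanup can be expressed with try-with-resources; a sketch of getHostedAppsMetadata() in that form (an alternative idiom, not the committed code):

    try (Connection conn = getConnection();
         PreparedStatement stmt = conn.prepareStatement(GET_HOSTED_APPS_METADATA_SQL);
         ResultSet rs = stmt.executeQuery()) {
      while (rs.next()) {
        hostedAppMap.put(rs.getString("HOSTNAME"),
          new HashSet<>(Arrays.asList(StringUtils.split(rs.getString("APP_IDS"), ","))));
      }
    } // rs, stmt and conn close automatically, even on exception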

+ 9 - 0
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java

@@ -202,6 +202,15 @@ public class TimelineMetricConfiguration {
  public static final String AGGREGATORS_SKIP_BLOCK_CACHE =
    "timeline.metrics.aggregators.skip.blockcache.enabled";

+  public static final String DISABLE_METRIC_METADATA_MGMT =
+    "timeline.metrics.service.metadata.management.disabled";
+
+  public static final String METRICS_METADATA_SYNC_INIT_DELAY =
+    "timeline.metrics.service.metadata.sync.init.delay";
+
+  public static final String METRICS_METADATA_SYNC_SCHEDULE_DELAY =
+    "timeline.metrics.service.metadata.sync.delay";
+
  public static final String HOST_APP_ID = "HOST";

  private Configuration hbaseConf;
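These three keys gate the metadata manager and its background sync. A sketch of how a consumer might read them (the default values and the sync runnable name are assumptions for illustration, not taken from this commit):

    boolean metadataDisabled = metricsConf.getBoolean(DISABLE_METRIC_METADATA_MGMT, false);
    if (!metadataDisabled) {
      long initDelaySecs = metricsConf.getLong(METRICS_METADATA_SYNC_INIT_DELAY, 120);
      long syncDelaySecs = metricsConf.getLong(METRICS_METADATA_SYNC_SCHEDULE_DELAY, 300);
      // hypothetical runnable that flushes un-persisted metadata to the Phoenix tables
      executorService.scheduleWithFixedDelay(metadataSyncRunnable,
        initDelaySecs, syncDelaySecs, TimeUnit.SECONDS);
    }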

+ 20 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricStore.java

@@ -19,11 +19,14 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline

import org.apache.hadoop.metrics2.sink.timeline.Precision;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
+import java.util.Map;
+import java.util.Set;

public interface TimelineMetricStore {
  /**
@@ -67,6 +70,21 @@ public interface TimelineMetricStore {
   * @return An {@link org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse}.
   * @throws SQLException, IOException
   */
-  TimelinePutResponse putMetrics(TimelineMetrics metrics)
-    throws SQLException, IOException;
+  TimelinePutResponse putMetrics(TimelineMetrics metrics) throws SQLException, IOException;
+
+  /**
+   * Return all metrics metadata that have been written to the store.
+   * @return { appId : [ @TimelineMetricMetadata ] }
+   * @throws SQLException
+   * @throws IOException
+   */
+  Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata() throws SQLException, IOException;
+
+  /**
+   * Returns all hosts that have written metrics, along with the appIds hosted on each.
+   * @return { hostname : [ appIds ] }
+   * @throws SQLException
+   * @throws IOException
+   */
+  Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException;
}
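Both discovery methods return plain maps, so a caller such as a REST endpoint can serve them directly. A minimal usage sketch (the store variable is assumed to be wired to an implementation like HBaseTimelineMetricStore above):

    Map<String, List<TimelineMetricMetadata>> metadataByApp = store.getTimelineMetricMetadata();
    for (Map.Entry<String, List<TimelineMetricMetadata>> entry : metadataByApp.entrySet()) {
      System.out.println(entry.getKey() + " publishes " + entry.getValue().size() + " metrics");
    }

    Map<String, Set<String>> hostApps = store.getHostAppsMetadata();
    Set<String> appsOnHost = hostApps.get("host1.example.com"); // hostname is illustrative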

+ 4 - 1
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAggregatorFactory.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
import org.apache.commons.io.FilenameUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;

import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_DAILY_CHECKPOINT_CUTOFF_MULTIPLIER;
@@ -227,7 +228,8 @@ public class TimelineMetricAggregatorFactory {
   * Timeslice : 30 sec
   */
  public static TimelineMetricAggregator createTimelineClusterAggregatorSecond(
-    PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf) {
+    PhoenixHBaseAccessor hBaseAccessor, Configuration metricsConf,
+    TimelineMetricMetadataManager metadataManager) {

    String checkpointDir = metricsConf.get(
      TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);
@@ -251,6 +253,7 @@ public class TimelineMetricAggregatorFactory {
    // Second based aggregation have added responsibility of time slicing
    return new TimelineMetricClusterAggregatorSecond(
      "TimelineClusterAggregatorSecond",
+      metadataManager,
      hBaseAccessor, metricsConf,
      checkpointLocation,
      sleepIntervalMillis,

+ 12 - 16
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricAppAggregator.java

@@ -21,12 +21,17 @@ import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
+
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_APP_IDS;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.HOST_APP_ID;

@@ -40,13 +45,13 @@ public class TimelineMetricAppAggregator {
  private static final Log LOG = LogFactory.getLog(TimelineMetricAppAggregator.class);
  // Lookup to check candidacy of an app
  private final List<String> appIdsToAggregate;
-  // Map to lookup apps on a host
-  private Map<String, List<String>> hostedAppsMap = new HashMap<String, List<String>>();
-
+  private final Map<String, Set<String>> hostedAppsMap;
  Map<TimelineClusterMetric, MetricClusterAggregate> aggregateClusterMetrics;

-  public TimelineMetricAppAggregator(Configuration metricsConf) {
+  public TimelineMetricAppAggregator(TimelineMetricMetadataManager metadataManager,
+                                     Configuration metricsConf) {
    appIdsToAggregate = getAppIdsForHostAggregation(metricsConf);
+    hostedAppsMap = metadataManager.getHostedAppsCache();
    LOG.info("AppIds configured for aggregation: " + appIdsToAggregate);
  }

@@ -66,15 +71,6 @@ public class TimelineMetricAppAggregator {
    aggregateClusterMetrics = null;
  }

-  /**
-   * Useful for resetting apps that no-longer need aggregation without restart.
-   */
-  public void destroy() {
-    LOG.debug("Cleanup aggregated data as well as in-memory state.");
-    aggregateClusterMetrics = null;
-    hostedAppsMap = new HashMap<String, List<String>>();
-  }
-
  /**
   * Calculate aggregates if the clusterMetric is a Host metric for recorded
   * apps that are housed by this host.
@@ -101,9 +97,9 @@ public class TimelineMetricAppAggregator {
      // Build the hostedapps map if not a host metric
      // Check app candidacy for host aggregation
      if (appIdsToAggregate.contains(appId)) {
-        List<String> appIds = hostedAppsMap.get(hostname);
+        Set<String> appIds = hostedAppsMap.get(hostname);
        if (appIds == null) {
-          appIds = new ArrayList<String>();
+          appIds = new HashSet<>();
          hostedAppsMap.put(hostname, appIds);
        }
        if (!appIds.contains(appId)) {
@@ -126,7 +122,7 @@ public class TimelineMetricAppAggregator {
      return;
    }

-    List<String> apps = hostedAppsMap.get(hostname);
+    Set<String> apps = hostedAppsMap.get(hostname);
    for (String appId : apps) {
      // Add a new cluster aggregate metric if none exists
      TimelineClusterMetric appTimelineClusterMetric =

+ 4 - 1
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/TimelineMetricClusterAggregatorSecond.java

@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
@@ -50,7 +51,9 @@ public class TimelineMetricClusterAggregatorSecond extends AbstractTimelineAggre
  // 1 minute client side buffering adjustment
  private final Long serverTimeShiftAdjustment;

+
  public TimelineMetricClusterAggregatorSecond(String aggregatorName,
+                                               TimelineMetricMetadataManager metadataManager,
                                               PhoenixHBaseAccessor hBaseAccessor,
                                               Configuration metricsConf,
                                               String checkpointLocation,
@@ -65,7 +68,7 @@ public class TimelineMetricClusterAggregatorSecond extends AbstractTimelineAggre
      sleepIntervalMillis, checkpointCutOffMultiplier, aggregatorDisabledParam,
      tableName, outputTableName, nativeTimeRangeDelay);

-    appAggregator = new TimelineMetricAppAggregator(metricsConf);
+    appAggregator = new TimelineMetricAppAggregator(metadataManager, metricsConf);
    this.timeSliceIntervalMillis = timeSliceInterval;
    this.serverTimeShiftAdjustment = Long.parseLong(metricsConf.get(SERVER_SIDE_TIMESIFT_ADJUSTMENT, "90000"));
  }

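Editor's note on the wiring above: the aggregator now takes a TimelineMetricMetadataManager so that TimelineMetricAppAggregator can record hosted-app metadata while it aggregates. A minimal sketch of the new construction path, using the factory call that the tests later in this diff exercise; hdb and metricsConf stand for an already-created PhoenixHBaseAccessor and Configuration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;

class MetadataAwareAggregatorWiring {
  static TimelineMetricAggregator wire(PhoenixHBaseAccessor hdb, Configuration metricsConf) {
    TimelineMetricMetadataManager metadataManager =
        new TimelineMetricMetadataManager(hdb, metricsConf);
    metadataManager.initializeMetadata(); // starts the periodic store sync unless disabled
    return TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(
        hdb, metricsConf, metadataManager);
  }
}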
+ 56 - 0
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataKey.java

@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+public class TimelineMetricMetadataKey {
+  String metricName;
+  String appId;
+
+  public TimelineMetricMetadataKey(String metricName, String appId) {
+    this.metricName = metricName;
+    this.appId = appId;
+  }
+
+  public String getMetricName() {
+    return metricName;
+  }
+
+  public String getAppId() {
+    return appId;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    TimelineMetricMetadataKey that = (TimelineMetricMetadataKey) o;
+
+    if (!metricName.equals(that.metricName)) return false;
+    return !(appId != null ? !appId.equals(that.appId) : that.appId != null);
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = metricName.hashCode();
+    result = 31 * result + (appId != null ? appId.hashCode() : 0);
+    return result;
+  }
+
+}

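Since TimelineMetricMetadataKey serves as the key of a ConcurrentHashMap (METADATA_CACHE in the manager below), the equals()/hashCode() pair above is what guarantees one cache entry per (metricName, appId). A minimal sketch, assuming the demo sits in the same package as the key class; the metric and app names are hypothetical:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class MetadataKeyDemo {
  public static void main(String[] args) {
    ConcurrentMap<TimelineMetricMetadataKey, String> cache = new ConcurrentHashMap<>();
    // Two distinct instances built from the same (metricName, appId) pair...
    TimelineMetricMetadataKey k1 = new TimelineMetricMetadataKey("regionserver.readRequestCount", "hbase");
    TimelineMetricMetadataKey k2 = new TimelineMetricMetadataKey("regionserver.readRequestCount", "hbase");
    cache.put(k1, "cached");
    // ...resolve to the same entry, so repeated puts never grow the cache.
    System.out.println(cache.get(k2)); // prints "cached"
  }
}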
+ 187 - 0
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java

@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.MetadataException;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+
+import java.sql.SQLException;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.UNDEFINED;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_METRIC_METADATA_MGMT;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
+
+public class TimelineMetricMetadataManager {
+  private static final Log LOG = LogFactory.getLog(TimelineMetricMetadataManager.class);
+  private boolean isDisabled = false;
+  // Cache all metadata on retrieval
+  private final Map<TimelineMetricMetadataKey, TimelineMetricMetadata> METADATA_CACHE = new ConcurrentHashMap<>();
+  // Map to lookup apps on a host
+  private final Map<String, Set<String>> HOSTED_APPS_MAP = new ConcurrentHashMap<>();
+  // Sync only when needed
+  AtomicBoolean SYNC_HOSTED_APPS_METADATA = new AtomicBoolean(false);
+
+  // Single thread to sync back new writes to the store
+  private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
+
+  private PhoenixHBaseAccessor hBaseAccessor;
+  private Configuration metricsConf;
+
+  public TimelineMetricMetadataManager(PhoenixHBaseAccessor hBaseAccessor,
+                                       Configuration metricsConf) {
+    this.hBaseAccessor = hBaseAccessor;
+    this.metricsConf = metricsConf;
+  }
+
+  /**
+   * Initialize Metadata from the store
+   */
+  public void initializeMetadata() {
+    if (metricsConf.getBoolean(DISABLE_METRIC_METADATA_MGMT, false)) {
+      isDisabled = true;
+    } else {
+      // Schedule the executor to sync to store
+      executorService.scheduleWithFixedDelay(new TimelineMetricMetadataSync(this),
+        metricsConf.getInt(METRICS_METADATA_SYNC_INIT_DELAY, 120), // 2 minutes
+        metricsConf.getInt(METRICS_METADATA_SYNC_SCHEDULE_DELAY, 300), // 5 minutes
+        TimeUnit.SECONDS);
+      // Read from store and initialize map
+      try {
+        Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+          hBaseAccessor.getTimelineMetricMetadata();
+
+        LOG.info("Retrieved " + metadata.size() + " metadata objects from store.");
+        // Store in the cache
+        METADATA_CACHE.putAll(metadata);
+
+        Map<String, Set<String>> hostedAppData = hBaseAccessor.getHostedAppsMetadata();
+
+        LOG.info("Retrieved " + hostedAppData.size() + " host objects from store.");
+        HOSTED_APPS_MAP.putAll(hostedAppData);
+
+      } catch (SQLException e) {
+        LOG.warn("Exception loading metric metadata", e);
+      }
+    }
+  }
+
+  public Map<TimelineMetricMetadataKey, TimelineMetricMetadata> getMetadataCache() {
+    return METADATA_CACHE;
+  }
+
+  public Map<String, Set<String>> getHostedAppsCache() {
+    return HOSTED_APPS_MAP;
+  }
+
+  public boolean syncHostedAppsMetadata() {
+    return SYNC_HOSTED_APPS_METADATA.get();
+  }
+
+  public void markSuccessOnSyncHostedAppsMetadata() {
+    SYNC_HOSTED_APPS_METADATA.set(false);
+  }
+
+  /**
+   * Update value in metadata cache
+   * @param metadata {@link TimelineMetricMetadata}
+   */
+  public void putIfModifiedTimelineMetricMetadata(TimelineMetricMetadata metadata) {
+    TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(
+      metadata.getMetricName(), metadata.getAppId());
+
+    TimelineMetricMetadata metadataFromCache = METADATA_CACHE.get(key);
+
+    if (metadataFromCache != null) {
+      try {
+        if (metadataFromCache.needsToBeSynced(metadata)) {
+          metadata.setIsPersisted(false); // Set the flag to ensure sync to store on next run
+          METADATA_CACHE.put(key, metadata);
+        }
+      } catch (MetadataException e) {
+        LOG.warn("Error inserting Metadata in cache.", e);
+      }
+
+    } else {
+      METADATA_CACHE.put(key, metadata);
+    }
+  }
+
+  /**
+   * Update value in hosted apps cache
+   * @param hostname Host name
+   * @param appId Application Id
+   */
+  public void putIfModifiedHostedAppsMetadata(String hostname, String appId) {
+    Set<String> apps = HOSTED_APPS_MAP.get(hostname);
+    if (apps == null) {
+      apps = new HashSet<>();
+      HOSTED_APPS_MAP.put(hostname, apps);
+    }
+
+    if (!apps.contains(appId)) {
+      apps.add(appId);
+      SYNC_HOSTED_APPS_METADATA.set(true);
+    }
+  }
+
+  public void persistMetadata(Collection<TimelineMetricMetadata> metadata) throws SQLException {
+    hBaseAccessor.saveMetricMetadata(metadata);
+  }
+
+  public void persistHostedAppsMetadata(Map<String, Set<String>> hostedApps) throws SQLException {
+    hBaseAccessor.saveHostAppsMetadata(hostedApps);
+  }
+
+  public TimelineMetricMetadata getTimelineMetricMetadata(TimelineMetric timelineMetric) {
+    return new TimelineMetricMetadata(
+      timelineMetric.getMetricName(),
+      timelineMetric.getAppId(),
+      timelineMetric.getType(), // Present type and unit are synonyms
+      UNDEFINED, // TODO: Add support for types in the application
+      timelineMetric.getStartTime(),
+      true
+    );
+  }
+
+  /**
+   * Fetch hosted apps from store
+   * @throws SQLException
+   */
+  Map<String, Set<String>> getPersistedHostedAppsData() throws SQLException {
+    return hBaseAccessor.getHostedAppsMetadata();
+  }
+
+  public boolean isDisabled() {
+    return isDisabled;
+  }
+}

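The manager above is a write-later cache: putIfModifiedTimelineMetricMetadata() only replaces a cached entry when needsToBeSynced() reports that the incoming metadata differs, and flags the replacement so the background sync persists it. A standalone sketch of that put-if-modified idea, using a stand-in Meta class rather than the real TimelineMetricMetadata:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class PutIfModifiedDemo {
  static final class Meta {
    final String units;
    boolean persisted = true;
    Meta(String units) { this.units = units; }
    boolean needsToBeSynced(Meta incoming) { return !units.equals(incoming.units); }
  }

  static final Map<String, Meta> CACHE = new ConcurrentHashMap<>();

  // Simplified: the real manager keys on (metricName, appId), but the shape
  // of the check-then-replace is the same.
  static void putIfModified(String key, Meta incoming) {
    Meta cached = CACHE.get(key);
    if (cached == null || cached.needsToBeSynced(incoming)) {
      incoming.persisted = false; // dirty: the sync thread will persist it
      CACHE.put(key, incoming);
    }
  }

  public static void main(String[] args) {
    putIfModified("bytes_in:HOST", new Meta("bytes"));
    putIfModified("bytes_in:HOST", new Meta("bytes"));     // unchanged: no-op
    putIfModified("bytes_in:HOST", new Meta("bytes/sec")); // changed: replaced, marked dirty
    System.out.println(CACHE.get("bytes_in:HOST").persisted); // false
  }
}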
+ 105 - 0
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java

@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Sync metadata info with the store
+ */
+public class TimelineMetricMetadataSync implements Runnable {
+  private static final Log LOG = LogFactory.getLog(TimelineMetricMetadataSync.class);
+
+  private final TimelineMetricMetadataManager cacheManager;
+
+  public TimelineMetricMetadataSync(TimelineMetricMetadataManager cacheManager) {
+    this.cacheManager = cacheManager;
+  }
+
+  @Override
+  public void run() {
+    List<TimelineMetricMetadata> metadataToPersist = new ArrayList<>();
+    // Find all entries to persist
+    for (TimelineMetricMetadata metadata : cacheManager.getMetadataCache().values()) {
+      if (!metadata.isPersisted()) {
+        metadataToPersist.add(metadata);
+      }
+    }
+    boolean markSuccess = false;
+    if (!metadataToPersist.isEmpty()) {
+      try {
+        cacheManager.persistMetadata(metadataToPersist);
+        markSuccess = true;
+      } catch (SQLException e) {
+        LOG.warn("Error persisting metadata.", e);
+      }
+    }
+    // Mark corresponding entries as persisted to skip on next run
+    if (markSuccess) {
+      for (TimelineMetricMetadata metadata : metadataToPersist) {
+        TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(
+          metadata.getMetricName(), metadata.getAppId()
+        );
+
+        // Mark entry as being persisted
+        metadata.setIsPersisted(true);
+        // Update cache
+        cacheManager.getMetadataCache().put(key, metadata);
+      }
+    }
+    // Sync hosted apps data if needed
+    if (cacheManager.syncHostedAppsMetadata()) {
+      Map<String, Set<String>> persistedData = null;
+      try {
+        persistedData = cacheManager.getPersistedHostedAppsData();
+      } catch (SQLException e) {
+        LOG.warn("Failed on fetching hosted apps data from store.", e);
+        return; // Something wrong with store
+      }
+
+      Map<String, Set<String>> cachedData = cacheManager.getHostedAppsCache();
+      Map<String, Set<String>> dataToSync = new HashMap<>();
+      if (cachedData != null && !cachedData.isEmpty()) {
+        for (Map.Entry<String, Set<String>> cacheEntry : cachedData.entrySet()) {
+          // No persistence / stale data in store
+          if (persistedData == null || persistedData.isEmpty() ||
+              !persistedData.containsKey(cacheEntry.getKey()) ||
+              !persistedData.get(cacheEntry.getKey()).containsAll(cacheEntry.getValue())) {
+            dataToSync.put(cacheEntry.getKey(), cacheEntry.getValue());
+          }
+        }
+        try {
+          cacheManager.persistHostedAppsMetadata(dataToSync);
+          cacheManager.markSuccessOnSyncHostedAppsMetadata();
+
+        } catch (SQLException e) {
+          LOG.warn("Error persisting hosted apps metadata.", e);
+        }
+      }
+
+    }
+  }
+}

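The runnable above is only the unit of work; the scheduling itself lives in TimelineMetricMetadataManager.initializeMetadata(). A sketch of that wiring, with the patch's default delays passed in as parameters:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class MetadataSyncScheduling {
  // initDelaySeconds/scheduleDelaySeconds default to 120 and 300 in the patch.
  static ScheduledExecutorService startSync(TimelineMetricMetadataManager manager,
                                            int initDelaySeconds,
                                            int scheduleDelaySeconds) {
    ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
    // scheduleWithFixedDelay (not scheduleAtFixedRate): a slow store round-trip
    // pushes the next run back instead of stacking overlapping syncs.
    executor.scheduleWithFixedDelay(new TimelineMetricMetadataSync(manager),
        initDelaySeconds, scheduleDelaySeconds, TimeUnit.SECONDS);
    return executor;
  }
}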
+ 35 - 2
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java

@@ -22,10 +22,12 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
 
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import java.util.Collection;
 import java.util.concurrent.TimeUnit;
 
 /**
@@ -102,6 +104,23 @@ public class PhoenixTransactSQL {
       "SERVER_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, " +
       "TTL=%s, COMPRESSION='%s'";
 
+  public static final String CREATE_METRICS_METADATA_TABLE_SQL =
+    "CREATE TABLE IF NOT EXISTS METRICS_METADATA " +
+      "(METRIC_NAME VARCHAR, " +
+      "APP_ID VARCHAR, " +
+      "UNITS CHAR(20), " +
+      "TYPE CHAR(20), " +
+      "START_TIME UNSIGNED_LONG, " +
+      "SUPPORTS_AGGREGATION BOOLEAN " +
+      "CONSTRAINT pk PRIMARY KEY (METRIC_NAME, APP_ID)) " +
+      "DATA_BLOCK_ENCODING='%s', COMPRESSION='%s'";
+
+  public static final String CREATE_HOSTED_APPS_METADATA_TABLE_SQL =
+    "CREATE TABLE IF NOT EXISTS HOSTED_APPS_METADATA " +
+      "(HOSTNAME VARCHAR, APP_IDS VARCHAR, " +
+      "CONSTRAINT pk PRIMARY KEY (HOSTNAME))" +
+      "DATA_BLOCK_ENCODING='%s', COMPRESSION='%s'";
+
   /**
    * ALTER table to set new options
    */
@@ -148,6 +167,14 @@ public class PhoenixTransactSQL {
     "METRIC_COUNT) " +
     "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
 
+  public static final String UPSERT_METADATA_SQL =
+    "UPSERT INTO METRICS_METADATA (METRIC_NAME, APP_ID, UNITS, TYPE, " +
+      "START_TIME, SUPPORTS_AGGREGATION) " +
+      "VALUES (?, ?, ?, ?, ?, ?)";
+
+  public static final String UPSERT_HOSTED_APPS_METADATA_SQL =
+    "UPSERT INTO HOSTED_APPS_METADATA (HOSTNAME, APP_IDS) VALUES (?, ?)";
+
   /**
    * Retrieve a set of rows from metrics records table.
    */
@@ -217,6 +244,13 @@ public class PhoenixTransactSQL {
     "METRIC_MIN " +
     "FROM %s";
 
+  public static final String GET_METRIC_METADATA_SQL = "SELECT " +
+    "METRIC_NAME, APP_ID, UNITS, TYPE, START_TIME, " +
+    "SUPPORTS_AGGREGATION FROM METRICS_METADATA";
+
+  public static final String GET_HOSTED_APPS_METADATA_SQL = "SELECT " +
+    "HOSTNAME, APP_IDS FROM HOSTED_APPS_METADATA";
+
   /**
    * Aggregate host metrics using a GROUP BY clause to take advantage of
    * N - way parallel scan where N = number of regions.
@@ -491,8 +525,7 @@ public class PhoenixTransactSQL {
   }
 
   private static PreparedStatement setQueryParameters(PreparedStatement stmt,
-                                                      Condition condition)
-    throws SQLException {
+                                                      Condition condition) throws SQLException {
     int pos = 1;
     //For GET_LATEST_METRIC_SQL_SINGLE_HOST parameters should be set 2 times
     do {

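For context on how these constants are meant to be used, here is a hedged sketch of binding one row for UPSERT_METADATA_SQL; the real binding lives in PhoenixHBaseAccessor.saveMetricMetadata(), which this diff does not show. Parameter order follows the column list above, and Phoenix buffers mutations until commit():

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class MetadataUpsertSketch {
  static void upsertOne(Connection conn, String metricName, String appId,
                        String units, String type, long startTime,
                        boolean supportsAggregation) throws SQLException {
    try (PreparedStatement stmt = conn.prepareStatement(
        PhoenixTransactSQL.UPSERT_METADATA_SQL)) {
      stmt.setString(1, metricName);
      stmt.setString(2, appId);
      stmt.setString(3, units);
      stmt.setString(4, type);
      stmt.setLong(5, startTime);
      stmt.setBoolean(6, supportsAggregation);
      stmt.executeUpdate();
      conn.commit();
    }
  }
}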
+ 46 - 13
ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java

@@ -20,11 +20,13 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
@@ -65,6 +67,7 @@ import java.util.Collection;
 import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
@@ -149,15 +152,15 @@ public class TimelineWebServices {
     TimelineEntities entities = null;
     try {
       entities = store.getEntities(
-          parseStr(entityType),
-          parseLongStr(limit),
-          parseLongStr(windowStart),
-          parseLongStr(windowEnd),
-          parseStr(fromId),
-          parseLongStr(fromTs),
-          parsePairStr(primaryFilter, ":"),
-          parsePairsStr(secondaryFilter, ",", ":"),
-          parseFieldsStr(fields, ","));
+        parseStr(entityType),
+        parseLongStr(limit),
+        parseLongStr(windowStart),
+        parseLongStr(windowEnd),
+        parseStr(fromId),
+        parseLongStr(fromTs),
+        parsePairStr(primaryFilter, ":"),
+        parsePairsStr(secondaryFilter, ",", ":"),
+        parseFieldsStr(fields, ","));
     } catch (NumberFormatException e) {
       throw new BadRequestException(
           "windowStart, windowEnd or limit is not a numeric value.");
@@ -339,11 +342,11 @@ public class TimelineWebServices {
    * @param precision Precision [ seconds, minutes, hours ]
    * @param limit limit on total number of {@link TimelineMetric} records
    *              retrieved.
-   * @return {@link TimelineMetrics}
+   * @return {@link @TimelineMetrics}
    */
   @GET
   @Path("/metrics")
-  @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
+  @Produces({ MediaType.APPLICATION_JSON })
   public TimelineMetrics getTimelineMetrics(
     @Context HttpServletRequest req,
     @Context HttpServletResponse res,
@@ -387,11 +390,41 @@ public class TimelineWebServices {
       throw new WebApplicationException(sql,
         Response.Status.INTERNAL_SERVER_ERROR);
     } catch (IOException io) {
-      throw new WebApplicationException(io,
-        Response.Status.INTERNAL_SERVER_ERROR);
+      throw new WebApplicationException(io, Response.Status.INTERNAL_SERVER_ERROR);
     }
   }
 
+  @GET
+  @Path("/metrics/metadata")
+  @Produces({ MediaType.APPLICATION_JSON })
+  public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata(
+    @Context HttpServletRequest req,
+    @Context HttpServletResponse res
+  ) {
+    init(res);
+
+    try {
+      return timelineMetricStore.getTimelineMetricMetadata();
+    } catch (Exception e) {
+      throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @GET
+  @Path("/metrics/hosts")
+  @Produces({ MediaType.APPLICATION_JSON })
+  public Map<String, Set<String>> getHostedAppsMetadata(
+    @Context HttpServletRequest req,
+    @Context HttpServletResponse res
+  ) {
+    init(res);
+
+    try {
+      return timelineMetricStore.getHostAppsMetadata();
+    } catch (Exception e) {
+      throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
+    }
+  }
 
   /**
    * Store the given entities into the timeline store, and return the errors
    * Store the given entities into the timeline store, and return the errors
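The two new GET endpoints above expose the discovery data over the collector's existing web service root. A sketch of probing the metadata endpoint; the host, the port (6188 is the usual collector default) and the ws/v1/timeline prefix are assumptions about a stock deployment:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

class MetadataEndpointProbe {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:6188/ws/v1/timeline/metrics/metadata");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line); // JSON map: appId -> list of metric metadata
      }
    }
  }
}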

+ 10 - 1
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java

@@ -44,6 +44,8 @@ import java.net.URL;
 import java.net.URLClassLoader;
 import java.net.URLClassLoader;
 import java.sql.Connection;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.Statement;
 import java.sql.Statement;
 
 
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
@@ -156,15 +158,22 @@ public class TestApplicationHistoryServer {
 
     Connection connection = createNiceMock(Connection.class);
     Statement stmt = createNiceMock(Statement.class);
+    PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
+    ResultSet rs = createNiceMock(ResultSet.class);
     mockStatic(DriverManager.class);
     expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/ams-hbase-unsecure"))
       .andReturn(connection).anyTimes();
     expect(connection.createStatement()).andReturn(stmt).anyTimes();
+    expect(connection.prepareStatement(anyString())).andReturn(preparedStatement).anyTimes();
     suppress(method(Statement.class, "executeUpdate", String.class));
+    expect(preparedStatement.executeQuery()).andReturn(rs).anyTimes();
+    expect(rs.next()).andReturn(false).anyTimes();
+    preparedStatement.close();
+    expectLastCall().anyTimes();
     connection.close();
     expectLastCall();
 
-    EasyMock.replay(connection, stmt);
+    EasyMock.replay(connection, stmt, preparedStatement, rs);
     replayAll();
 
     historyServer = new ApplicationHistoryServer();

+ 7 - 1
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java

@@ -99,10 +99,16 @@ public abstract class AbstractMiniHBaseClusterTest extends BaseTest {
       stmt = conn.createStatement();
 
       stmt.execute("delete from METRIC_AGGREGATE");
+      stmt.execute("delete from METRIC_AGGREGATE_MINUTE");
       stmt.execute("delete from METRIC_AGGREGATE_HOURLY");
+      stmt.execute("delete from METRIC_AGGREGATE_DAILY");
       stmt.execute("delete from METRIC_RECORD");
-      stmt.execute("delete from METRIC_RECORD_HOURLY");
       stmt.execute("delete from METRIC_RECORD_MINUTE");
+      stmt.execute("delete from METRIC_RECORD_HOURLY");
+      stmt.execute("delete from METRIC_RECORD_DAILY");
+      stmt.execute("delete from METRICS_METADATA");
+      stmt.execute("delete from HOSTED_APPS_METADATA");
+
      conn.commit();
     } finally {
       if (stmt != null) {

+ 7 - 2
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java

@@ -27,11 +27,13 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineClusterMetric;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.SQLException;
@@ -41,6 +43,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+
 import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertTrue;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.MetricTestHelper.createEmptyTimelineClusterMetric;
@@ -204,7 +207,8 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
   public void testGetClusterMetricRecordsSeconds() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, new Configuration());
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(
+        hdb, new Configuration(), new TimelineMetricMetadataManager(hdb, new Configuration()));
 
     long startTime = System.currentTimeMillis();
     long ctime = startTime + 1;
@@ -243,7 +247,8 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
   public void testGetClusterMetricRecordLatestWithFunction() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, new Configuration());
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond
+        (hdb, new Configuration(), new TimelineMetricMetadataManager(hdb, new Configuration()));
 
     long startTime = System.currentTimeMillis();
     long ctime = startTime + 1;

+ 14 - 0
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java

@@ -19,13 +19,17 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
 
 import org.apache.hadoop.metrics2.sink.timeline.Precision;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.TreeMap;
 
 public class TestTimelineMetricStore implements TimelineMetricStore {
@@ -80,4 +84,14 @@ public class TestTimelineMetricStore implements TimelineMetricStore {
 
     return new TimelinePutResponse();
   }
+
+  @Override
+  public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata() throws SQLException, IOException {
+    return null;
+  }
+
+  @Override
+  public Map<String, Set<String>> getHostAppsMetadata() throws SQLException, IOException {
+    return Collections.emptyMap();
+  }
 }

+ 11 - 5
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/aggregators/ITClusterAggregator.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregator;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricAggregatorFactory;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineMetricReadHelper;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataManager;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.Condition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultCondition;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL;
@@ -75,7 +76,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   public void testShouldAggregateClusterProperly() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     long startTime = System.currentTimeMillis();
@@ -127,7 +129,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   public void testShouldAggregateClusterIgnoringInstance() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     long startTime = System.currentTimeMillis();
@@ -202,7 +205,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   public void testShouldAggregateDifferentMetricsOnClusterProperly() throws Exception {
     // GIVEN
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     // here we put some metrics that will be aggregated
@@ -485,7 +489,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
     Configuration conf = getConfigurationForTest(false);
     conf.set(CLUSTER_AGGREGATOR_APP_IDS, "app1");
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, conf);
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        conf, new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     long startTime = System.currentTimeMillis();
@@ -536,7 +541,8 @@ public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
   @Test
   public void testClusterAggregateMetricNormalization() throws Exception {
     TimelineMetricAggregator agg =
-      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb, getConfigurationForTest(false));
+      TimelineMetricAggregatorFactory.createTimelineClusterAggregatorSecond(hdb,
+        getConfigurationForTest(false), new TimelineMetricMetadataManager(hdb, new Configuration()));
     TimelineMetricReadHelper readHelper = new TimelineMetricReadHelper(false);
 
     // Sample data

+ 112 - 0
ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java

@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractMiniHBaseClusterTest;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
+  TimelineMetricMetadataManager metadataManager;
+
+  @Before
+  public void insertDummyRecords() throws IOException, SQLException {
+    // Initialize new manager
+    metadataManager = new TimelineMetricMetadataManager(hdb, new Configuration());
+    final long now = System.currentTimeMillis();
+
+    TimelineMetrics timelineMetrics = new TimelineMetrics();
+    TimelineMetric metric1 = new TimelineMetric();
+    metric1.setMetricName("dummy_metric1");
+    metric1.setHostName("dummy_host1");
+    metric1.setTimestamp(now);
+    metric1.setStartTime(now - 1000);
+    metric1.setAppId("dummy_app1");
+    metric1.setType("Integer");
+    metric1.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric1);
+    TimelineMetric metric2 = new TimelineMetric();
+    metric2.setMetricName("dummy_metric2");
+    metric2.setHostName("dummy_host2");
+    metric2.setTimestamp(now);
+    metric2.setStartTime(now - 1000);
+    metric2.setAppId("dummy_app2");
+    metric2.setType("Integer");
+    metric2.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric2);
+
+    hdb.insertMetricRecordsWithMetadata(metadataManager, timelineMetrics);
+  }
+
+  @Test
+  public void testSaveMetricsMetadata() throws Exception {
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> cachedData = metadataManager.getMetadataCache();
+
+    Assert.assertNotNull(cachedData);
+    Assert.assertEquals(2, cachedData.size());
+    TimelineMetricMetadataKey key1 = new TimelineMetricMetadataKey("dummy_metric1", "dummy_app1");
+    TimelineMetricMetadataKey key2 = new TimelineMetricMetadataKey("dummy_metric2", "dummy_app2");
+    TimelineMetricMetadata value1 = new TimelineMetricMetadata("dummy_metric1",
+      "dummy_app1", "Integer", null, 1L, true);
+    TimelineMetricMetadata value2 = new TimelineMetricMetadata("dummy_metric2",
+      "dummy_app2", "Integer", null, 1L, true);
+
+    Assert.assertEquals(value1, cachedData.get(key1));
+    Assert.assertEquals(value2, cachedData.get(key2));
+
+    TimelineMetricMetadataSync syncRunnable = new TimelineMetricMetadataSync(metadataManager);
+    syncRunnable.run();
+
+    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> savedData =
+      hdb.getTimelineMetricMetadata();
+
+    Assert.assertNotNull(savedData);
+    Assert.assertEquals(2, savedData.size());
+    Assert.assertEquals(value1, savedData.get(key1));
+    Assert.assertEquals(value2, savedData.get(key2));
+
+    Map<String, Set<String>> cachedHostData = metadataManager.getHostedAppsCache();
+    Map<String, Set<String>> savedHostData = metadataManager.getPersistedHostedAppsData();
+    Assert.assertEquals(cachedHostData.size(), savedHostData.size());
+    Assert.assertEquals("dummy_app1", cachedHostData.get("dummy_host1").iterator().next());
+    Assert.assertEquals("dummy_app2", cachedHostData.get("dummy_host2").iterator().next());
+    Assert.assertEquals("dummy_app1", savedHostData.get("dummy_host1").iterator().next());
+    Assert.assertEquals("dummy_app2", savedHostData.get("dummy_host2").iterator().next());
+  }
+
+
+}

+ 15 - 868
ambari-server/pom.xml

@@ -268,7 +268,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>rpm-maven-plugin</artifactId>
-        <version>2.0.1</version>
+        <version>2.1.4</version>
         <executions>
           <execution>
             <!-- unbinds rpm creation from maven lifecycle -->
@@ -305,406 +305,36 @@
             <scriptFile>src/main/package/rpm/posttrans_server.sh</scriptFile>
             <fileEncoding>utf-8</fileEncoding>
           </posttransScriptlet>
-          <defaultFilemode>644</defaultFilemode>
-          <defaultDirmode>755</defaultDirmode>
-          <defaultUsername>root</defaultUsername>
-          <defaultGroupname>root</defaultGroupname>
           <needarch>x86_64</needarch>
           <mappings>
             <mapping>
-              <directory>/usr/lib/ambari-server</directory>
-              <dependency>
-              </dependency>
-            </mapping>
-            <mapping>
-              <directory>/usr/lib/ambari-server/web</directory>
-              <sources>
-                <source>
-                  <location>${ambari-web-dir}</location>
-                  <includes>
-                    <include>**</include>
-                  </includes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/usr/lib/ambari-server</directory>
-              <sources>
-                <source>
-                  <location>${project.build.directory}/${project.artifactId}-${project.version}.jar</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${ambari_commons.install.dir}</directory>
-              <sources>
-                <source>
-                  <location>
-                    ${project.basedir}/../ambari-common/src/main/python/ambari_commons
-                  </location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${resource_management.install.dir}</directory>
-              <sources>
-                <source>
-                  <location>
-                    ${resourceManagementSrcLocation}
-                  </location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${jinja.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</location>
-                  <excludes>
-                    <exclude>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</exclude>
-                  </excludes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>${simplejson.install.dir}</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/usr/sbin</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>src/main/python/ambari-server.py</location>
-                </source>
-                <source>
-                  <location>src/main/python/ambari_server_main.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/init.d</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>sbin/ambari-server</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <directoryIncluded>false</directoryIncluded>
-              <sources>
-                <source>
-                  <location>../ambari-common/src/main/unix/ambari-python-wrap</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/etc/ambari-server/conf</directory>
-              <configuration>true</configuration>
-              <sources>
-                <source>
-                  <location>${ambariProperties}</location>
-                </source>
-                <source>
-                  <location>conf/unix/log4j.properties</location>
-                </source>
-                <source>
-                  <location>conf/unix/krb5JAASLogin.conf</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <configuration>true</configuration>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-env.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/ambari-sudo.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>conf/unix/install-helper.sh</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/keys</directory>
-              <sources>
-                <source>
-                  <location>conf/unix/ca.config</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/keys/db</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>src/main/resources/db</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/run/ambari-server/bootstrap</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/run/ambari-server/stack-recommendations</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/log/ambari-server</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources</directory>
-              <sources>
-                <source>
-                  <location>target/classes/Ambari-DDL-Postgres-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-Postgres-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-Postgres-EMBEDDED-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-Oracle-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-MySQL-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-Oracle-DROP.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-MySQL-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-SQLServer-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-SQLServer-CREATELOCAL.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-SQLServer-DROP.sql</location>
-                </source>
-                <source>
-                  <location>target/classes/Ambari-DDL-SQLAnywhere-CREATE.sql</location>
-                </source>
-                <source>
-                  <location>src/main/resources/Ambari-DDL-SQLAnywhere-DROP.sql</location>
-                </source>
-                <source>
-                  <location>${project.build.directory}/DBConnectionVerification.jar</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/data/tmp</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/data/cache</directory>
-              <filemode>700</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/apps</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>src/main/resources/slider_resources/README.txt</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/scripts</directory>
-              <filemode>755</filemode>
-              <sources>
-                <source>
-                  <location>src/main/resources/scripts</location>
-                </source>
-                <source>
-                  <location>src/main/python/upgradeHelper.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/views</directory>
-              <filemode>755</filemode>
-              <sources>
-                <source>
-                  <location>${ambari-admin-dir}/target</location>
-                  <includes>
-                    <include>*.jar</include>
-                  </includes>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade/ddl</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/upgrade/ddl</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade/dml</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/upgrade/dml</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/common-services</directory>
-              <sources>
-                <source>
-                  <location>${commonServicesSrcLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/upgrade/catalog</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/upgrade/catalog</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/stacks/${stack.distribution}</directory>
-              <sources>
-                <source>
-                  <location>${stacksSrcLocation}</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/stacks</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-              <sources>
-                <source>
-                  <location>target/classes/stacks/stack_advisor.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/usr/lib/python2.6/site-packages/ambari_server</directory>
-              <filemode>755</filemode>
+              <directory>/etc</directory>
              <username>root</username>
              <groupname>root</groupname>
+              <directoryIncluded>false</directoryIncluded> <!-- avoid managing /etc/init.d -->
              <sources>
                <source>
-                  <location>src/main/python/ambari_server</location>
-                </source>
-                <source>
-                  <location>src/main/python/bootstrap.py</location>
-                </source>
-                <source>
-                  <location>src/main/python/setupAgent.py</location>
-                </source>
-                <source>
-                  <location>src/main/python/os_check_type.py</location>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/run/ambari-server</directory>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources</directory>
-              <sources>
-                <source>
-                  <location>../version</location>
-                  <filter>true</filter>
-                </source>
-              </sources>
-            </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-server/resources/custom_action_definitions</directory>
-              <sources>
-                <source>
-                  <location>src/main/resources/custom_action_definitions</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}-dist/etc</location>
                </source>
              </sources>
            </mapping>
-            <mapping>
-              <!-- custom actions root-->
-              <directory>/var/lib/ambari-server/resources/custom_actions</directory>
-              <filemode>755</filemode>
+			<mapping>
+              <directory>/usr</directory>
              <username>root</username>
              <groupname>root</groupname>
              <sources>
                <source>
-                  <location>${customActionsRoot}</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}-dist/usr</location>
                </source>
              </sources>
            </mapping>
            <mapping>
-              <directory>/var/lib/ambari-server/resources/host_scripts</directory>
-              <filemode>755</filemode>
+              <directory>/var</directory>
              <username>root</username>
              <groupname>root</groupname>
              <sources>
                <source>
-                  <location>src/main/resources/host_scripts</location>
+                  <location>${project.build.directory}${dirsep}${project.artifactId}-${project.version}-dist/var</location>
                </source>
              </sources>
            </mapping>
@@ -714,7 +344,7 @@
      <plugin>
        <groupId>org.vafer</groupId>
        <artifactId>jdeb</artifactId>
-        <version>1.0.1</version>
+        <version>1.4</version>
        <executions>
          <execution>
            <!-- unbinds rpm creation from maven lifecycle -->
@@ -726,498 +356,15 @@
        </executions>
        <configuration>
          <controlDir>${basedir}/src/main/package/deb/control</controlDir>
-          <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}.deb</deb>
+          <deb>${basedir}/target/${project.artifactId}_${package-version}-${package-release}-dist.deb</deb>
+          <skip>false</skip>
+          <skipPOMs>false</skipPOMs>
          <dataSet>
            <data>
-              <type>template</type>
-              <paths>
-                <path>/usr/lib/ambari-server</path>
-                <path>/var/run/ambari-server</path>
-                <path>/var/run/ambari-server/bootstrap</path>
-                <path>/var/run/ambari-server/stack-recommendations</path>
-                <path>/var/log/ambari-server</path>
-                <path>/var/lib/ambari-server/resources/upgrade</path>
-                <path>/var/lib/ambari-server/data/tmp</path>
-                <path>/var/lib/ambari-server/data/cache</path>
-              </paths>
-            </data>
-            <!-- TODO: should be included all subdirs, if exists-->
-            <data>
-              <src>${basedir}/../ambari-web/public</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/ambari-server/web</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.build.directory}/${project.artifactId}-${project.version}-dist/${project.artifactId}-${project.version}/lib</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari-server.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari_server_main.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari-server.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/sbin</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>../ambari-common/src/main/unix/ambari-python-wrap</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/ambari-server</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/init.d/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${ambariProperties}</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-server/conf</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/log4j.properties</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-server/conf</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/krb5JAASLogin.conf</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/etc/ambari-server/conf</prefix>
-              </mapper>
-            </data>
-             <!-- /q001 -->
-            <data>
-              <src>conf/unix/ambari-env.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ambari-sudo.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/install-helper.sh</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/slider_resources/README.txt</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/apps/</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>conf/unix/ca.config</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/keys</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/db</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/keys/db</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>700</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-Postgres-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-Postgres-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-Postgres-EMBEDDED-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-Oracle-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-MySQL-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-Oracle-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-MySQL-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-SQLServer-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-SQLServer-CREATELOCAL.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-SQLServer-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/Ambari-DDL-SQLAnywhere-CREATE.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/Ambari-DDL-SQLAnywhere-DROP.sql</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.build.directory}/DBConnectionVerification.jar</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/scripts</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/scripts</prefix>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${ambari-admin-dir}/target</src>
-              <type>directory</type>
-              <includes>*.jar</includes>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/views</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/upgradeHelper.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/scripts</prefix>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/upgrade/ddl</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/upgrade/ddl</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/upgrade/dml</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/upgrade/dml</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${commonServicesSrcLocation}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/common-services</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/upgrade/catalog</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/upgrade/catalog</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${stacksSrcLocation}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/stacks/${stack.distribution}</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>target/classes/stacks/stack_advisor.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/stacks</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/ambari_server</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/bootstrap.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/setupAgent.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/python/os_check_type.py</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/usr/lib/python2.6/site-packages/ambari_server</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>${basedir}/target/version</src>
-              <type>file</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/custom_action_definitions</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/custom_action_definitions</prefix>
-              </mapper>
-            </data>
-            <data>
-              <src>${customActionsRoot}</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/custom_actions</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>
-            <data>
-              <src>src/main/resources/host_scripts</src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>/var/lib/ambari-server/resources/host_scripts</prefix>
-                <user>root</user>
-                <group>root</group>
-                <filemode>755</filemode>
-              </mapper>
-            </data>          
-            <data>
-              <src>
-                ${project.basedir}/../ambari-common/src/main/python/ambari_commons
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${ambari_commons.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>
-                ${resourceManagementSrcLocation}
-              </src>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${resource_management.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</src>
-              <excludes>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2/testsuite</excludes>
-              <type>directory</type>
-              <mapper>
-                <type>perm</type>
-                <prefix>${jinja.install.dir}</prefix>
-                <filemode>755</filemode>
-                <user>root</user>
-                <group>root</group>
-              </mapper>
-            </data>
-            <data>
-              <src>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</src>
-              <type>directory</type>
+              <src>${project.build.directory}/${project.artifactId}-${project.version}-dist.tar.gz</src>
+              <type>archive</type>
              <mapper>
                <type>perm</type>
-                <prefix>${simplejson.install.dir}</prefix>
-                <filemode>755</filemode>
                <user>root</user>
                <group>root</group>
              </mapper>

+ 289 - 59
ambari-server/src/main/assemblies/server.xml

@@ -23,85 +23,315 @@
    <format>tar.gz</format>
  </formats>
  <includeBaseDirectory>false</includeBaseDirectory>
-  <files>
-    <file>
-      <source>${project.build.directory}/${artifact.artifactId}-${artifact.version}.jar</source>
-      <outputDirectory>ambari-server-${project.version}/lib/ambari-server</outputDirectory>
-    </file>
-   <file>
-      <source>${basedir}/src/main/python/ambari-server.py</source>
-      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/python/bootstrap.py</source>
-      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
-    </file>
-    <file>
-      <source>${basedir}/src/main/python/setupAgent.py</source>
-      <outputDirectory>/ambari-server-${project.version}/sbin</outputDirectory>
-    </file>
-  </files>
+  <!-- File sets. Syntax:
+	  <fileSets>
+	    <fileSet>
+	      <useDefaultExcludes/>
+	      <outputDirectory/>
+	      <includes/>
+	      <excludes/>
+	      <fileMode/>
+	      <directoryMode/>
+	      <directory/>
+	      <lineEnding/>
+	      <filtered/>
+	    </fileSet>
+	  </fileSets>
+  -->
  <fileSets>
-    <!-- Distro files, readme, licenses, etc -->
    <fileSet>
-      <directory>${basedir}/../</directory>
-      <outputDirectory>ambari-server-${project.version}/</outputDirectory>
-      <includes>
-        <include>*.txt</include>
-      </includes>
+      <directory>${ambari-web-dir}</directory>
+      <outputDirectory>/usr/lib/ambari-server/web</outputDirectory>
    </fileSet>
-     <!--
    <fileSet>
-      <directory>${project.build.directory}/web/</directory>
-      <outputDirectory>ambari-server-${project.version}/web/</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_commons</directory>
+      <outputDirectory>${ambari_commons.install.dir}</outputDirectory>
    </fileSet>
-    -->
-    <!--
    <fileSet>
-      <directory>${basedir}/src/main/bin</directory>
-      <outputDirectory>ambari-server-${project.version}/bin</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
-      <fileMode>0755</fileMode>
+      <directory>${resourceManagementSrcLocation}</directory>
+      <outputDirectory>${resource_management.install.dir}</outputDirectory>
    </fileSet>
-    -->
    <fileSet>
-      <directory>${basedir}/src/main/resources/</directory>
-      <outputDirectory>/ambari-server-${project.version}/keystore</outputDirectory>
-      <includes>
-        <include>db/*</include>
-        <include>ca.config</include>
-        <include>pass.txt</include>
-      </includes>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_jinja2/ambari_jinja2</directory>
+      <outputDirectory>${jinja.install.dir}</outputDirectory>
+      <excludes>
+      	<exclude>**/testsuite/**</exclude>
+      </excludes>
    </fileSet>
    <fileSet>
-      <directory>${basedir}/../ambari-web/public</directory>
-      <outputDirectory>ambari-server-${project.version}/web</outputDirectory>
-      <includes>
-        <include>**</include>
-      </includes>
+      <directory>${project.basedir}/../ambari-common/src/main/python/ambari_simplejson</directory>
+      <outputDirectory>${simplejson.install.dir}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <fileMode>700</fileMode>
+      <directory>src/main/resources/db</directory>
+      <outputDirectory>/var/lib/ambari-server/keys/db</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/run/ambari-server</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/run/ambari-server/bootstrap</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/run/ambari-server/stack-recommendations</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>755</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/log/ambari-server</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directoryMode>777</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-server/data/tmp</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
    </fileSet>
    <fileSet>
-      <directory>src/main/conf</directory>
-      <outputDirectory>/ambari-server-${project.version}/etc/ambari-server/conf</outputDirectory>
+      <directoryMode>700</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-server/data/cache</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
    </fileSet>
    <fileSet>
-      <directory>${tarballResourcesFolder}</directory>
-      <outputDirectory>/ambari-server-${project.version}/var/lib/ambari-server/resources/</outputDirectory>
+      <directoryMode>700</directoryMode>
+      <directory>${empty.dir}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade</outputDirectory>
+	  <excludes>
+	    <exclude>*/**</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/scripts</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/scripts</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${ambari-admin-dir}/target</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/views</outputDirectory>
      <includes>
-        <include>common-services/**</include>
-        <include>stacks/stack_advisor.py</include>
-        <include>stacks/${stack.distribution}/**</include>
+        <include>*.jar</include>
      </includes>
    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/upgrade/ddl</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade/ddl</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/upgrade/dml</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade/dml</outputDirectory>
+    </fileSet>
+     <fileSet>
+      <directory>${commonServicesSrcLocation}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/common-services</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/upgrade/catalog</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/upgrade/catalog</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${stacksSrcLocation}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/stacks/${stack.distribution}</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/python/ambari_server</directory>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/custom_action_definitions</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/custom_action_definitions</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>${customActionsRoot}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/custom_actions</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>src/main/resources/host_scripts</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/host_scripts</outputDirectory>
+    </fileSet>
  </fileSets>
+  <!-- Single files. Syntax:
+	  <files>
+	    <file>
+	      <source/>
+	      <outputDirectory/>
+	      <destName/>
+	      <fileMode/>
+	      <lineEnding/>
+	      <filtered/>
+	    </file>
+	  </files>
+  -->
+  <files>
+    <file>
+      <source>${project.build.directory}/${project.artifactId}-${project.version}.jar</source>
+      <outputDirectory>/usr/lib/ambari-server</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/ambari-server.py</source>
+      <outputDirectory>/usr/sbin</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/ambari_server_main.py</source>
+      <outputDirectory>/usr/sbin</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>sbin/ambari-server</source>
+      <outputDirectory>/etc/init.d</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>../ambari-common/src/main/unix/ambari-python-wrap</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <source>${ambariProperties}</source>
+      <outputDirectory>/etc/ambari-server/conf</outputDirectory>
+    </file>
+    <file>
+      <source>conf/unix/log4j.properties</source>
+      <outputDirectory>/etc/ambari-server/conf</outputDirectory>
+    </file>
+    <file>
+      <source>conf/unix/krb5JAASLogin.conf</source>
+      <outputDirectory>/etc/ambari-server/conf</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/ambari-env.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/ambari-sudo.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <fileMode>700</fileMode>
+      <source>conf/unix/install-helper.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
+      <source>conf/unix/ca.config</source>
+      <outputDirectory>/var/lib/ambari-server/keys</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-Postgres-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-Postgres-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-Postgres-EMBEDDED-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-Oracle-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-MySQL-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-Oracle-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-MySQL-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-SQLServer-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-SQLServer-CREATELOCAL.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-SQLServer-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>target/classes/Ambari-DDL-SQLAnywhere-CREATE.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/Ambari-DDL-SQLAnywhere-DROP.sql</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>${project.build.directory}/DBConnectionVerification.jar</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+    <file>
+      <source>src/main/resources/slider_resources/README.txt</source>
+      <outputDirectory>/var/lib/ambari-server/resources/apps</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/upgradeHelper.py</source>
+      <outputDirectory>/var/lib/ambari-server/resources/scripts</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>target/classes/stacks/stack_advisor.py</source>
+      <outputDirectory>/var/lib/ambari-server/resources/stacks</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/bootstrap.py</source>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/setupAgent.py</source>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </file>
+    <file>
+      <fileMode>755</fileMode>
+      <source>src/main/python/os_check_type.py</source>
+      <outputDirectory>/usr/lib/python2.6/site-packages/ambari_server</outputDirectory>
+    </file>
+    <file>
+      <source>${basedir}/target/version</source>
+      <outputDirectory>/var/lib/ambari-server/resources</outputDirectory>
+    </file>
+  </files>    
  <dependencySets>
    <dependencySet>
-      <outputDirectory>ambari-server-${project.version}/lib/ambari-server</outputDirectory>
+      <outputDirectory>/usr/lib/ambari-server</outputDirectory>
      <unpack>false</unpack>
      <scope>compile</scope>
    </dependencySet>

+ 5 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProvider.java

@@ -19,6 +19,7 @@ package org.apache.ambari.server.controller.internal;
 
 
 import com.google.inject.Inject;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -171,6 +172,10 @@ public class AdminSettingResourceProvider extends AbstractAuthorizedResourceProv
      @Override
      public AdminSettingEntity invoke() throws AmbariException, AuthorizationException {
        AdminSettingEntity entity = toEntity(properties);
+        if (dao.findByName(entity.getName()) != null) {
+          throw new DuplicateResourceException(
+                  String.format("Setting already exists. setting name :%s ", entity.getName()));
+        }
        dao.create(entity);
        notifyCreate(Resource.Type.AdminSetting, request);
        return entity;

+ 3 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java

@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -223,7 +224,8 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
 
 
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
 
-    Set<Resource> results = new HashSet<Resource>();
+    // use a collection which preserves order since JPA sorts the results
+    Set<Resource> results = new LinkedHashSet<Resource>();
 
     for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
       String clusterName = (String) propertyMap.get(ALERT_DEF_CLUSTER_NAME);

+ 4 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertResourceProvider.java

@@ -21,6 +21,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -43,9 +44,9 @@ import org.apache.ambari.server.orm.dao.AlertsDAO;
 import org.apache.ambari.server.orm.entities.AlertCurrentEntity;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
+import org.apache.commons.lang.StringUtils;
 
 import com.google.inject.Inject;
-import org.apache.commons.lang.StringUtils;
 
 /**
  * ResourceProvider for Alert instances
@@ -150,7 +151,8 @@ public class AlertResourceProvider extends ReadOnlyResourceProvider implements
 
 
     Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
 
-    Set<Resource> results = new HashSet<Resource>();
+    // use a collection which preserves order since JPA sorts the results
+    Set<Resource> results = new LinkedHashSet<Resource>();
 
     for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
 

+ 2 - 1
ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java

@@ -122,7 +122,8 @@ public class AmbariAuthorizationFilter implements Filter {
       }
     }
 
-    if (authentication == null || !authentication.isAuthenticated()) {
+    if (authentication == null || authentication instanceof AnonymousAuthenticationToken ||
+        !authentication.isAuthenticated()) {
      String token = httpRequest.getHeader(INTERNAL_TOKEN_HEADER);
      if (token != null) {
        context.setAuthentication(new InternalAuthenticationToken(token));

+ 1 - 3
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java

@@ -24,7 +24,6 @@ import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
-import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -47,7 +46,6 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
@@ -1648,7 +1646,7 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
              newStormProps.put("nimbus.supervisors.users", "['{{storm_user}}']");
            }
            if (!cluster.getDesiredConfigByType("storm-site").getProperties().containsKey("storm.zookeeper.superACL")) {
-              newStormProps.put("storm.zookeeper.superACL", "sasl:{{storm_base_jaas_principal}}");
+              newStormProps.put("storm.zookeeper.superACL", "sasl:{{storm_bare_jaas_principal}}");
            }
            if (!cluster.getDesiredConfigByType("storm-site").getProperties().containsKey("ui.filter.params")) {
              newStormProps.put("ui.filter.params", "{'type': 'kerberos', 'kerberos.principal': '{{storm_ui_jaas_principal}}', 'kerberos.keytab': '{{storm_ui_keytab_path}}', 'kerberos.name.rules': 'DEFAULT'}");

+ 8 - 0
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java

@@ -162,6 +162,7 @@ public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
     updateOozieConfigs();
     updateTezConfigs();
     updateRangerKmsDbksConfigs();
+    updateAMSConfigs();
   }
 
   protected void updateAlerts() {
@@ -196,6 +197,13 @@ public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
         alertDefinitionDAO.merge(alertDefinition);
       }
 
+      final AlertDefinitionEntity amsZookeeperProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+        clusterID, "ams_metrics_collector_zookeeper_server_process");
+
+      if (amsZookeeperProcessAlertDefinitionEntity != null) {
+        LOG.info("Removing alert : ams_metrics_collector_zookeeper_server_process");
+        alertDefinitionDAO.remove(amsZookeeperProcessAlertDefinitionEntity);
+      }
     }
   }
 

+ 15 - 0
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog222.java

@@ -31,6 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.SQLException;
+import java.util.HashMap;
 import java.util.Map;
 
 /**
@@ -97,7 +98,21 @@ public class UpgradeCatalog222 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
     updateAlerts();
+    updateStormConfigs();
+  }
+
+  protected void updateStormConfigs() throws  AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Map<String, Cluster> clusterMap = getCheckedClusterMap(ambariManagementController.getClusters());
 
+    for (final Cluster cluster : clusterMap.values()) {
+      if (cluster.getDesiredConfigByType("storm-site") != null && cluster.getDesiredConfigByType("storm-site").getProperties().containsKey("storm.zookeeper.superACL")
+              && cluster.getDesiredConfigByType("storm-site").getProperties().get("storm.zookeeper.superACL").equals("sasl:{{storm_base_jaas_principal}}")) {
+        Map<String, String> newStormProps = new HashMap<String, String>();
+        newStormProps.put("storm.zookeeper.superACL", "sasl:{{storm_bare_jaas_principal}}");
+        updateConfigurationPropertiesForCluster(cluster, "storm-site", newStormProps, true, false);
+      }
+    }
   }
 
   protected void updateAlerts() {

+ 14 - 0
ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml

@@ -205,4 +205,18 @@
     </description>
   </property>
 
+  <property>
+    <name>default_segment_num</name>
+    <display-name>Default Number of Virtual Segments</display-name>
+    <value>24</value>
+    <description>
+      The default number of virtual segments to use when executing a query statement. When the query is actually executed,
+      the number of virtual segments may differ from this number depending on the query's needs.
+      When expanding the cluster, you should adjust this number to reflect the number of nodes in the new cluster times the number of virtual segments per node.
+    </description>
+    <value-attributes>
+      <type>int</type>
+    </value-attributes>
+  </property>
+
 </configuration>
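
Note: the shipped default of 24 virtual segments follows the sizing rule spelled out in the description above (nodes in the cluster times virtual segments per node). A quick worked example, with both counts chosen purely for illustration:

    # illustrative arithmetic only; neither count comes from this patch
    nodes = 4                 # assumed cluster size
    vsegs_per_node = 6        # assumed virtual segments per node
    default_segment_num = nodes * vsegs_per_node
    assert default_segment_num == 24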

+ 11 - 4
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py

@@ -24,6 +24,7 @@ import time
 import traceback
 import logging
 
+from resource_management.core import global_lock
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.core.resources import Execute
@@ -145,13 +146,19 @@ def execute(configurations={}, parameters={}, host_name=None):
        kerberos_executable_search_paths = configurations[KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY]
      else:
        kerberos_executable_search_paths = None
-             
+
      kinit_path_local = get_kinit_path(kerberos_executable_search_paths)
      kinitcmd=format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal}; ")
 
-      Execute(kinitcmd, user=smokeuser,
-        path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
-        timeout=10)
+      # prevent concurrent kinit
+      kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+      kinit_lock.acquire()
+      try:
+        Execute(kinitcmd, user=smokeuser,
+          path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
+          timeout=10)
+      finally:
+        kinit_lock.release()
 
     if host_name is None:
       host_name = socket.getfqdn()
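
For context, the hunk above relies on the new resource_management.core.global_lock module added elsewhere in this commit but not shown here. A minimal sketch of the pattern it implements, assuming a process-wide registry of threading.RLock objects keyed by lock type (only get_lock and LOCK_TYPE_KERBEROS are confirmed by the hunks in this commit; the registry layout and constant value are assumptions):

    import threading

    LOCK_TYPE_KERBEROS = "KERBEROS_LOCK"  # assumed constant value

    _locks = {}                         # one shared lock per lock type
    _registry_guard = threading.Lock()  # protects lazy creation of entries

    def get_lock(lock_type):
      # Return the shared re-entrant lock for this type, creating it on first
      # use, so every alert thread in the agent process serializes on it.
      with _registry_guard:
        if lock_type not in _locks:
          _locks[lock_type] = threading.RLock()
        return _locks[lock_type]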

+ 0 - 6
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_webhcat_server.py

@@ -26,13 +26,7 @@ import traceback
 import logging
 
 from resource_management.core.environment import Environment
-from resource_management.core.resources import Execute
-from resource_management.core import shell
-from resource_management.libraries.functions import format
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import get_klist_path
 from resource_management.libraries.functions.curl_krb_request import curl_krb_request
-from os import getpid, sep
 
 RESULT_CODE_OK = "OK"
 RESULT_CODE_CRITICAL = "CRITICAL"

+ 11 - 4
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/alerts/alert_check_oozie_server.py

@@ -17,17 +17,18 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 """
+import os
+import re
+
+from resource_management.core import global_lock
 from resource_management.core.environment import Environment
 from resource_management.core.environment import Environment
 from resource_management.core.resources import Execute
-from resource_management.core.shell import call
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_klist_path
 from ambari_commons.os_check import OSConst, OSCheck
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from urlparse import urlparse
-import os
-import re
 
 RESULT_CODE_OK = 'OK'
 RESULT_CODE_CRITICAL = 'CRITICAL'
 RESULT_CODE_CRITICAL = 'CRITICAL'
@@ -143,7 +144,13 @@ def get_check_command(oozie_url, host_name, configurations, parameters, only_kin
     else:
       kinit_command = "{0} -s {1} || ".format(klist_path_local, ccache_file) + kinit_part_command
 
-    Execute(kinit_command, environment=kerberos_env, user=user)
+    # prevent concurrent kinit
+    kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    kinit_lock.acquire()
+    try:
+      Execute(kinit_command, environment=kerberos_env, user=user)
+    finally:
+      kinit_lock.release()
 
   # oozie configuration directory uses a symlink when > HDP 2.2
   oozie_config_directory = OOZIE_CONF_DIR_LEGACY
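
The kinit_command built above uses the "klist -s <ccache> || kinit" short-circuit, so a fresh ticket is requested only when the cache is missing or expired. A hedged sketch of that construction (build_kinit_command is a hypothetical helper, not part of this patch):

  def build_kinit_command(klist_path, kinit_path, ccache_file, keytab, principal):
    # 'klist -s' exits 0 when the cache already holds a non-expired ticket,
    # so the kinit after '||' only runs when re-authentication is needed
    kinit_part = "{0} -c {1} -kt {2} {3}".format(kinit_path, ccache_file, keytab, principal)
    return "{0} -s {1} || {2}".format(klist_path, ccache_file, kinit_part)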

+ 1 - 1
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py

@@ -116,7 +116,7 @@ def oozie(is_server=False):
 
   # On some OS this folder could be not exists, so we will create it before pushing there files
   Directory(params.limits_conf_dir,
-            recursive=True,
+            create_parents=True,
             owner='root',
             group='root'
   )
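
The recursive=True argument was renamed to create_parents=True, which better describes the behavior: create missing parent directories, mkdir -p style, rather than applying anything recursively to existing contents. A rough plain-Python equivalent of what the flag requests (the Directory resource itself also handles owner, group, and idempotence):

  import errno
  import os

  def create_parents(path):
    # mkdir -p: create the directory and any missing ancestors;
    # an already-existing directory is not an error
    try:
      os.makedirs(path)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise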

+ 1 - 1
ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py

@@ -86,7 +86,7 @@ hive_user = status_params.hive_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
 spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = 'hdfs:///spark-history'
+spark_history_dir = default('/configurations/spark-defaults/spark.history.fs.logDirectory', "hdfs:///spark-history")
 
 spark_history_server_pid_file = status_params.spark_history_server_pid_file
 spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
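
spark_history_dir now goes through default(), which returns the configured value when the key path exists and the hard-coded fallback otherwise, so a user-supplied spark.history.fs.logDirectory is honored. A simplified sketch of that lookup (the real helper reads the command's configuration via Script.get_config(); here the dict is passed in explicitly):

  def default(key_path, fallback, configurations):
    # key_path looks like '/configurations/spark-defaults/spark.history.fs.logDirectory'
    current = configurations
    for part in key_path.strip('/').split('/'):
      if not isinstance(current, dict) or part not in current:
        return fallback
      current = current[part]
    return current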

+ 2 - 2
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json

@@ -36,8 +36,8 @@
         },
         {
           "core-site": {
-            "hadoop.proxyuser.yarn.groups": "*",
-            "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
           }
         }
       ],
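
The hard-coded yarn user and ResourceManager hostname are replaced with ${yarn-env/yarn_user} and ${clusterHostInfo/rm_host} tokens, which the Kerberos descriptor engine substitutes from the cluster's actual configuration, so clusters with a renamed YARN user keep working. A hedged sketch of that ${config-type/property} substitution (regex-based and simplified; clusterHostInfo values are typically host lists):

  import re

  def resolve_placeholders(text, configs):
    # replace ${config-type/property} with configs['config-type']['property'],
    # e.g. 'hadoop.proxyuser.${yarn-env/yarn_user}.groups' -> 'hadoop.proxyuser.yarn.groups'
    def _lookup(match):
      config_type, prop = match.group(1).split('/', 1)
      value = configs.get(config_type, {}).get(prop)
      return str(value) if value is not None else match.group(0)
    return re.sub(r'\$\{([^}]+)\}', _lookup, text)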

+ 0 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml

@@ -1795,7 +1795,6 @@ limitations under the License.
   </property>
   <property>
     <name>hive.server2.authentication.pam.services</name>
-    <value></value>
     <property-type>DONT_ADD_ON_UPGRADE</property-type>
     <depends-on>
       <property>
@@ -1806,7 +1805,6 @@ limitations under the License.
   </property>
   <property>
     <name>hive.server2.custom.authentication.class</name>
-    <value></value>
     <property-type>DONT_ADD_ON_UPGRADE</property-type>
     <depends-on>
       <property>

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json

@@ -37,8 +37,8 @@
         },
         {
           "core-site": {
-            "hadoop.proxyuser.yarn.groups": "*",
-            "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
           }
         }
       ],

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/widgets.json

@@ -83,7 +83,7 @@
             }
           ],
           "properties": {
-            "display_unit": "compactions",
+            "display_unit": "MinCs",
             "graph_type": "LINE",
             "time_range": "1"
           }
@@ -118,7 +118,7 @@
             }
           ],
           "properties": {
-            "display_unit": "compactions",
+            "display_unit": "MajCs",
             "graph_type": "LINE",
             "time_range": "1"
           }

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json

@@ -37,8 +37,8 @@
         },
         {
           "core-site": {
-            "hadoop.proxyuser.yarn.groups": "*",
-            "hadoop.proxyuser.yarn.hosts": "${yarn-site/yarn.resourcemanager.hostname}"
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
           }
         },
         {

+ 8 - 1
ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py

@@ -671,7 +671,14 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     if self.isHawqMasterComponentOnAmbariServer(services):
       if "hawq-site" in services["configurations"] and "hawq_master_address_port" in services["configurations"]["hawq-site"]["properties"]:
         putHawqSiteProperty('hawq_master_address_port', '')
-
+    # calculate optimal number of virtual segments
+    componentsListList = [service["components"] for service in services["services"]]
+    componentsList = [item["StackServiceComponents"] for sublist in componentsListList for item in sublist]
+    numSegments = len(self.__getHosts(componentsList, "HAWQSEGMENT"))
+    # update default if segments are deployed
+    if numSegments and "hawq-site" in services["configurations"] and "default_segment_num" in services["configurations"]["hawq-site"]["properties"]:
+      factor = 6 if numSegments < 50 else 4
+      putHawqSiteProperty('default_segment_num', numSegments * factor)
 
   def getServiceConfigurationValidators(self):
     parentValidators = super(HDP23StackAdvisor, self).getServiceConfigurationValidators()
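
The new block counts deployed HAWQSEGMENT hosts and seeds default_segment_num from that count, with more virtual segments per host on smaller clusters. The sizing rule, restated as a hedged standalone sketch:

  def recommend_default_segment_num(num_segment_hosts):
    # no HAWQ segments deployed: leave the stack default untouched
    if not num_segment_hosts:
      return None
    # clusters under 50 segment hosts get a factor of 6, larger ones 4
    factor = 6 if num_segment_hosts < 50 else 4
    return num_segment_hosts * factor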

+ 5 - 0
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml

@@ -268,6 +268,11 @@
       <skippable>true</skippable>  <!-- May fix configuration problems manually -->
       <supports-auto-skip-failure>false</supports-auto-skip-failure>
 
+      <!-- HDFS -->
+      <execute-stage service="HDFS" component="NAMENODE" title="Apply config changes for NameNode">
+        <task xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments"/>
+      </execute-stage>
+
       <!-- YARN -->
       <execute-stage service="YARN" component="RESOURCEMANAGER" title="Calculating Yarn Properties for Spark">
         <task xsi:type="server_action" summary="Calculating Yarn Properties for Spark Shuffle" class="org.apache.ambari.server.serveraction.upgrades.SparkShufflePropertyConfig" />

+ 4 - 76
ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml

@@ -436,6 +436,10 @@
       <component name="NAMENODE">
       <component name="NAMENODE">
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 
 
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments"/>
+        </pre-upgrade>
+
         <upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
           <task xsi:type="restart-task" />
         </upgrade>
         </upgrade>
@@ -743,105 +747,29 @@
 
     <service name="STORM">
       <component name="NIMBUS">
-        <pre-downgrade>
-          <task xsi:type="execute" summary="Removing Storm data from ZooKeeper">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_zookeeper_data</function>
-          </task>
-
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-downgrade>
-
-        <pre-upgrade>
-          <task xsi:type="manual">
-            <message>Before continuing, please deactivate and kill any currently running topologies.</message>
-          </task>
-
-          <task xsi:type="execute" summary="Removing Storm data from ZooKeeper">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_zookeeper_data</function>
-          </task>
-
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="STORM_REST_API">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
-
-        <pre-downgrade>
-          <task xsi:type="manual">
-            <message>Before continuing, please deactivate and kill any currently running topologies.</message>
-          </task>
-
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-downgrade>
-
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="SUPERVISOR">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="STORM_UI_SERVER">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
-
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
-
       <component name="DRPC_SERVER">
-        <pre-upgrade>
-          <task xsi:type="execute" summary="Removing local Storm data">
-            <script>scripts/storm_upgrade.py</script>
-            <function>delete_storm_local_data</function>
-          </task>
-        </pre-upgrade>
-
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
-
-        <post-upgrade>
-          <task xsi:type="manual">
-            <message>Please rebuild your topology using the new Storm version dependencies and resubmit it using the newly created jar.</message>
-          </task>
-        </post-upgrade>
       </component>
     </service>
 

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml

@@ -31,7 +31,7 @@
     </property>
     <property>
         <name>spark.history.fs.logDirectory</name>
-        <value>{{spark_history_dir}}</value>
+        <value>hdfs:///spark-history</value>
         <description>
             Base directory for history spark application log.
         </description>
@@ -45,7 +45,7 @@
     </property>
     <property>
         <name>spark.eventLog.dir</name>
-        <value>{{spark_history_dir}}</value>
+        <value>hdfs:///spark-history</value>
         <description>
             Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
         </description>

+ 26 - 5
ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml

@@ -75,14 +75,17 @@
   <property>
     <name>spark.history.fs.logDirectory</name>
     <value>{{spark_history_dir}}</value>
+    <final>true</final>
     <description>
-      Base directory for history spark application log.
+      Base directory for history spark application log. It is the same value
+      as in spark-defaults.xml.
     </description>
   </property>
 
   <property>
     <name>spark.eventLog.enabled</name>
     <value>true</value>
+    <final>true</final>
     <description>
       Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
     </description>
@@ -91,8 +94,10 @@
   <property>
     <name>spark.eventLog.dir</name>
     <value>{{spark_history_dir}}</value>
+    <final>true</final>
     <description>
-      Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
+      as in spark-defaults.xml.
     </description>
   </property>
 
@@ -138,10 +143,26 @@
   </property>
 
   <property>
-    <name>spark.executor.instances</name>
-    <value>2</value>
+    <name>spark.dynamicAllocation.initialExecutors</name>
+    <value>0</value>
     <description>
-      The number of executor.
+      Initial number of executors to run if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.maxExecutors</name>
+    <value>10</value>
+    <description>
+      Upper bound for the number of executors if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.minExecutors</name>
+    <value>0</value>
+    <description>
+      Lower bound for the number of executors if dynamic allocation is enabled.
     </description>
   </property>
 
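
The fixed spark.executor.instances setting is replaced by dynamic-allocation bounds. For the three values to be coherent, the initial executor count must lie within the min/max range, which the shipped 0/0/10 defaults satisfy; a small hedged sanity check:

  def dynamic_allocation_bounds_ok(min_execs, initial_execs, max_execs):
    # Spark expects min <= initial <= max when dynamic allocation is enabled
    return min_execs <= initial_execs <= max_execs

  assert dynamic_allocation_bounds_ok(0, 0, 10)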

+ 11 - 15
ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml

@@ -20,6 +20,17 @@
 
   <services>
 
+    <service name="HDFS">
+      <component name="NAMENODE">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments">
+            <type>hdfs-site</type>
+            <transfer operation="delete" delete-key="dfs.namenode.rpc-address" if-type="hdfs-site" if-key="dfs.nameservices" if-key-state="present"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
+
     <service name="HBASE">
       <component name="HBASE_MASTER">
         <changes>
@@ -161,9 +172,6 @@
             <type>spark-defaults</type>
             <transfer operation="delete" delete-key="spark.yarn.services" />
             <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
-            <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
-            <set key="spark.eventLog.enabled" value="true"/>
-            <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
           </definition>
         </changes>
       </component>
@@ -179,18 +187,6 @@
             <transfer operation="delete" delete-key="spark.yarn.submit.file.replication" />
             <transfer operation="delete" delete-key="spark.yarn.preserve.staging.files" />
             <transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
-            <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
-            <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
-            <set key="spark.eventLog.enabled" value="true"/>
-            <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
-            <set key="spark.master" value="{{spark_thrift_master}}"/>
-            <set key="spark.scheduler.allocation.file" value="{{spark_conf}}/spark-thrift-fairscheduler.xml"/>
-            <set key="spark.scheduler.mode" value="FAIR"/>
-            <set key="spark.shuffle.service.enabled" value="true"/>
-            <set key="spark.dynamicAllocation.enabled" value="true"/>
-            <set key="spark.executor.instances" value="2"/>
-            <set key="spark.yarn.am.memory" value="512m"/>
-            <set key="spark.executor.memory" value="1g"/>
           </definition>
         </changes>
       </component>
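
The new hdp_2_4_0_0_namenode_ha_adjustments definition is conditional: dfs.namenode.rpc-address is deleted only when dfs.nameservices is present in hdfs-site, i.e. only on HA-enabled clusters where the per-nameservice RPC addresses take over. The transfer rule's effect, as a hedged sketch:

  def apply_namenode_ha_adjustment(hdfs_site):
    # mirrors <transfer operation="delete" delete-key="dfs.namenode.rpc-address"
    #   if-type="hdfs-site" if-key="dfs.nameservices" if-key-state="present"/>
    if "dfs.nameservices" in hdfs_site:
      hdfs_site.pop("dfs.namenode.rpc-address", None)
    return hdfs_site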

+ 13 - 2
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AdminSettingResourceProviderTest.java

@@ -18,10 +18,12 @@
 package org.apache.ambari.server.controller.internal;
 
 import com.google.common.collect.Lists;
+import org.apache.ambari.server.DuplicateResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.RequestStatus;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.dao.AdminSettingDAO;
@@ -190,6 +192,7 @@ public class AdminSettingResourceProviderTest {
     Capture<AdminSettingEntity> entityCapture = Capture.newInstance();
     Request request = createRequest(entity);
 
+    expect(dao.findByName(entity.getName())).andReturn(null);
     dao.create(capture(entityCapture));
     mockControl.replay();
 
@@ -204,6 +207,16 @@ public class AdminSettingResourceProviderTest {
     assertEquals(AuthorizationHelper.getAuthenticatedName(), capturedEntity.getUpdatedBy());
   }
 
+  @Test(expected = ResourceAlreadyExistsException.class)
+  public void testCreateDuplicateResource() throws Exception {
+    setupAuthenticationForAdmin();
+    AdminSettingEntity entity = newEntity("motd");
+    Request request = createRequest(entity);
+
+    expect(dao.findByName(entity.getName())).andReturn(entity);
+    mockControl.replay();
+    resourceProvider.createResources(request);
+  }
 
   @Test(expected = AuthorizationException.class)
   public void testUpdateResources_noAuth() throws Exception {
@@ -211,7 +224,6 @@ public class AdminSettingResourceProviderTest {
     resourceProvider.updateResources(updateRequest(newEntity("motd")), null);
   }
 
-
   @Test(expected = AuthorizationException.class)
   public void testUpdateResources_clusterUser() throws Exception {
     setupAuthenticationForClusterUser();
@@ -219,7 +231,6 @@ public class AdminSettingResourceProviderTest {
     resourceProvider.updateResources(updateRequest(newEntity("motd")), null);
   }
 
-
   @Test
   public void testUpdateResources_admin() throws Exception {
     setupAuthenticationForAdmin();

+ 4 - 0
ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AdminSettingDAOTest.java

@@ -31,6 +31,7 @@ import java.util.Map;
 import java.util.Objects;
 
 import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNull;
 
 public class AdminSettingDAOTest {
   private  Injector injector;
@@ -63,6 +64,9 @@ public class AdminSettingDAOTest {
     retrieveAndValidateSame(entities);
     assertEquals(entities.size(), dao.findAll().size());
 
+    //Should return null if doesn't exist.
+    assertNull(dao.findByName("does-not-exist"));
+
 
     //Update
     for(Map.Entry<String, AdminSettingEntity> entry : entities.entrySet()) {

+ 38 - 0
ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertsDAOTest.java

@@ -411,6 +411,44 @@ public class AlertsDAOTest {
     assertEquals(0, currentAlerts.size());
   }
 
+  /**
+   * Tests that the Ambari sort is correctly applied to the JPA query.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testAlertCurrentSorting() throws Exception {
+    AlertCurrentRequest request = new AlertCurrentRequest();
+
+    Predicate clusterPredicate = new PredicateBuilder().property(
+        AlertResourceProvider.ALERT_CLUSTER_NAME).equals(m_cluster.getClusterName()).toPredicate();
+
+    request.Predicate = clusterPredicate;
+
+    SortRequestProperty sortRequestProperty = new SortRequestProperty(AlertResourceProvider.ALERT_ID, Order.ASC);
+    request.Sort = new SortRequestImpl(Collections.singletonList(sortRequestProperty));
+
+    List<AlertCurrentEntity> currentAlerts = m_dao.findAll(request);
+    assertTrue(currentAlerts.size() >= 5);
+    long lastId = Long.MIN_VALUE;
+    for (AlertCurrentEntity alert : currentAlerts) {
+      assertTrue(lastId < alert.getAlertId());
+      lastId = alert.getAlertId();
+    }
+
+    // change the sort to DESC
+    sortRequestProperty = new SortRequestProperty(AlertResourceProvider.ALERT_ID, Order.DESC);
+    request.Sort = new SortRequestImpl(Collections.singletonList(sortRequestProperty));
+
+    currentAlerts = m_dao.findAll(request);
+    assertTrue(currentAlerts.size() >= 5);
+    lastId = Long.MAX_VALUE;
+    for (AlertCurrentEntity alert : currentAlerts) {
+      assertTrue(lastId > alert.getAlertId());
+      lastId = alert.getAlertId();
+    }
+  }
+
   /**
    * Tests that the {@link AlertCurrentEntity} fields are updated properly when
    * a new {@link AlertHistoryEntity} is associated.

+ 47 - 0
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java

@@ -39,7 +39,9 @@ import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -144,6 +146,7 @@ public class UpgradeCatalog221Test {
     Method updateOozieConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateOozieConfigs");
     Method updateTezConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateTezConfigs");
     Method updateRangerKmsDbksConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateRangerKmsDbksConfigs");
+    Method updateAMSConfigs = UpgradeCatalog221.class.getDeclaredMethod("updateAMSConfigs");
 
     UpgradeCatalog221 upgradeCatalog221 = createMockBuilder(UpgradeCatalog221.class)
       .addMockedMethod(addNewConfigurationsFromXml)
@@ -151,6 +154,7 @@ public class UpgradeCatalog221Test {
       .addMockedMethod(updateOozieConfigs)
       .addMockedMethod(updateTezConfigs)
       .addMockedMethod(updateRangerKmsDbksConfigs)
+      .addMockedMethod(updateAMSConfigs)
       .createMock();
 
     upgradeCatalog221.addNewConfigurationsFromXml();
@@ -163,6 +167,8 @@ public class UpgradeCatalog221Test {
     expectLastCall().once();
     upgradeCatalog221.updateRangerKmsDbksConfigs();
     expectLastCall().once();
+    upgradeCatalog221.updateAMSConfigs();
+    expectLastCall().once();
 
     replay(upgradeCatalog221);
@@ -556,4 +562,45 @@ public class UpgradeCatalog221Test {
     String result = (String) updateAmsEnvContent.invoke(upgradeCatalog221, oldContent);
     Assert.assertEquals(expectedContent, result);
   }
+
+  @Test
+  public void testUpdateAlertDefinitions() {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    long clusterId = 1;
+
+    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final AlertDefinitionDAO mockAlertDefinitionDAO = easyMockSupport.createNiceMock(AlertDefinitionDAO.class);
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+    final AlertDefinitionEntity mockAmsZookeeperProcessAlertDefinitionEntity = easyMockSupport.createNiceMock(AlertDefinitionEntity.class);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(Clusters.class).toInstance(mockClusters);
+        bind(EntityManager.class).toInstance(entityManager);
+        bind(AlertDefinitionDAO.class).toInstance(mockAlertDefinitionDAO);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).atLeastOnce();
+
+    expect(mockClusterExpected.getClusterId()).andReturn(clusterId).anyTimes();
+
+    expect(mockAlertDefinitionDAO.findByName(eq(clusterId), eq("ams_metrics_collector_zookeeper_server_process")))
+      .andReturn(mockAmsZookeeperProcessAlertDefinitionEntity).atLeastOnce();
+
+    mockAlertDefinitionDAO.remove(mockAmsZookeeperProcessAlertDefinitionEntity);
+    expectLastCall().once();
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog221.class).updateAlerts();
+    easyMockSupport.verifyAll();
+  }
 }

+ 4 - 2
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog222Test.java

@@ -78,19 +78,21 @@ public class UpgradeCatalog222Test {
   public void testExecuteDMLUpdates() throws Exception {
     Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
     Method updateAlerts = UpgradeCatalog222.class.getDeclaredMethod("updateAlerts");
-
+    Method updateStormConfigs = UpgradeCatalog222.class.getDeclaredMethod("updateStormConfigs");
 
 
     UpgradeCatalog222 upgradeCatalog222 = createMockBuilder(UpgradeCatalog222.class)
             .addMockedMethod(addNewConfigurationsFromXml)
             .addMockedMethod(updateAlerts)
+            .addMockedMethod(updateStormConfigs)
             .createMock();
 
     upgradeCatalog222.addNewConfigurationsFromXml();
     expectLastCall().once();
     upgradeCatalog222.updateAlerts();
     expectLastCall().once();
-
+    upgradeCatalog222.updateStormConfigs();
+    expectLastCall().once();
 
     replay(upgradeCatalog222);
 

+ 55 - 0
ambari-server/src/test/python/TestGlobalLock.py

@@ -0,0 +1,55 @@
+# !/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from resource_management.core import global_lock
+from resource_management.core.exceptions import Fail
+
+from unittest import TestCase
+
+utils = __import__('ambari_server.utils').utils
+
+class TestGlobalLock(TestCase):
+  def test_get_invalid_lock(self):
+    """
+    Tests that an invalid lock throws an exception
+    :return:
+    """
+    try:
+      global_lock.get_lock("INVALID")
+      self.fail("Expected an exception when trying to retrieve an invalid lock")
+    except Fail:
+      pass
+
+  def test_get_kerberos_lock(self):
+    """
+    Tests that the kerberos lock can be retrieved.
+    :return:
+    """
+    kerberos_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    self.assertFalse(kerberos_lock is None)
+
+    kerberos_lock_2 = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
+    self.assertEqual(kerberos_lock, kerberos_lock_2)
+
+    kerberos_lock.acquire()
+    kerberos_lock.release()
+
+    kerberos_lock_2.acquire()
+    kerberos_lock_2.release()
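
Since the test above exercises plain acquire()/release() semantics, the returned object behaves like a standard threading lock; assuming it is one, callers could equally use it as a context manager instead of the explicit try/finally pattern in the alert scripts:

  from resource_management.core import global_lock

  # equivalent to acquire()/try/finally/release(), but harder to get wrong
  with global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS):
    pass  # run kinit or any other credential-cache-touching command here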

+ 3 - 3
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py

@@ -55,7 +55,7 @@ class TestOozieClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -121,7 +121,7 @@ class TestOozieClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -193,7 +193,7 @@ class TestOozieClient(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',

+ 79 - 38
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py

@@ -108,7 +108,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -227,9 +227,11 @@ class TestOozieServer(RMFTestCase):
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         sudo = True,
         sudo = True,
     )
     )
-    self.assertResourceCalled('Execute', ('chown', '-RL', 'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-        not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
     )
     )
     self.assertResourceCalled('File', '/tmp/mysql-connector-java.jar',
     self.assertResourceCalled('File', '/tmp/mysql-connector-java.jar',
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//mysql-jdbc-driver.jar'),
         content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//mysql-jdbc-driver.jar'),
@@ -263,8 +265,10 @@ class TestOozieServer(RMFTestCase):
         content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
         mode = 0644,
     )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
     )
 
 
@@ -327,7 +331,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -443,9 +447,11 @@ class TestOozieServer(RMFTestCase):
                               not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
                               not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
                               sudo = True,
                               sudo = True,
                               )
                               )
-    self.assertResourceCalled('Execute', ('chown', '-RL', u'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-                              not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-                              sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
                               )
                               )
     self.assertResourceCalled('File', '/tmp/sqla-client-jdbc.tar.gz',
     self.assertResourceCalled('File', '/tmp/sqla-client-jdbc.tar.gz',
                               content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//sqlanywhere-jdbc-driver.tar.gz'),
                               content = DownloadSource('http://c6401.ambari.apache.org:8080/resources//sqlanywhere-jdbc-driver.tar.gz'),
@@ -481,9 +487,11 @@ class TestOozieServer(RMFTestCase):
                               content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
                               mode = 0644,
                               )
-    self.assertResourceCalled('Execute', ('chown', '-R', u'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-                              sudo = True,
-                              )
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
+    )
     self.assertNoMoreResources()
 
 
@@ -698,7 +706,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -814,9 +822,11 @@ class TestOozieServer(RMFTestCase):
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         sudo = True,
         sudo = True,
     )
     )
-    self.assertResourceCalled('Execute', ('chown', '-RL', 'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-        not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
     )
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
     self.assertResourceCalled('Execute', 'ambari-sudo.sh cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
@@ -836,8 +846,10 @@ class TestOozieServer(RMFTestCase):
         content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
         mode = 0644,
     )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
     )
 
 
@@ -888,7 +900,7 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',
                               owner = 'root',
                               group = 'root',
-                              recursive=True,
+                              create_parents=True,
                               )
     self.assertResourceCalled('File', '/etc/security/limits.d/oozie.conf',
                               owner = 'root',
@@ -1004,9 +1016,11 @@ class TestOozieServer(RMFTestCase):
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         sudo = True,
         sudo = True,
     )
     )
-    self.assertResourceCalled('Execute', ('chown', '-RL', 'oozie:hadoop', '/var/lib/oozie/oozie-server/conf'),
-        not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server/conf',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursion_follow_links = True,
+                              recursive_ownership = True,
     )
     )
     self.assertResourceCalled('Execute', 'ambari-sudo.sh cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
     self.assertResourceCalled('Execute', 'ambari-sudo.sh cp /usr/lib/falcon/oozie/ext/falcon-oozie-el-extension-*.jar /usr/lib/oozie/libext',
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
         not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
@@ -1026,8 +1040,10 @@ class TestOozieServer(RMFTestCase):
         content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war -secure',
         mode = 0644,
     )
-    self.assertResourceCalled('Execute', ('chown', '-R', 'oozie:hadoop', '/var/lib/oozie/oozie-server'),
-        sudo = True,
+    self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
+                              owner = 'oozie',
+                              group = 'hadoop',
+                              recursive_ownership = True,
     )
 
     def test_configure_default_hdp22(self):
@@ -1221,15 +1237,19 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.1.0-2135'),
       sudo = True )
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-        sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
     self.assertResourceCalled('Execute', ('cp', '/usr/share/HDP-oozie/ext-2.2.zip', '/usr/hdp/current/oozie-server/libext'), sudo=True)
@@ -1289,13 +1309,21 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
+    self.assertResourceCalled('Link', '/etc/oozie/conf',
+                              to = '/usr/hdp/current/oozie-client/conf',
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-      sudo = True)
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
 
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
@@ -1347,13 +1375,18 @@ class TestOozieServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.0.0-0000'), sudo = True)
 
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-      sudo = True)
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
 
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext',mode = 0777)
@@ -1547,13 +1580,21 @@ class TestOozieServer(RMFTestCase):
 
 
     self.assertResourceCalled('Execute',
     self.assertResourceCalled('Execute',
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
       ('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '/usr/hdp/current/oozie-server/conf/'),
-      sudo = True)
-
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
+    self.assertResourceCalled('Link', '/etc/oozie/conf',
+                              to = '/usr/hdp/current/oozie-client/conf',
+    )
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
 
     self.assertResourceCalled('Execute',
     self.assertResourceCalled('Execute',
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
       ('tar', '-xvf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar', '-C', '/usr/hdp/current/oozie-server/conf//'),
-      sudo = True)
+      sudo = True,
+      tries = 3,
+      try_sleep = 1
+    )
 
 
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/tmp/oozie-upgrade-backup', action = ['delete'])
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)
     self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-server/libext', mode = 0777)

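The retry arguments asserted above give the Oozie conf backup and restore a tolerance for transient failures. A minimal sketch of the Execute resource these tests now expect, assuming the standard resource_management API (the call itself is illustrative, not taken from this patch):

from resource_management.core.resources.system import Execute

# Back up the Oozie conf dir; a transient tar/filesystem failure is retried
# up to 3 times with a 1-second pause between attempts.
Execute(('tar', '-zcvhf', '/tmp/oozie-upgrade-backup/oozie-conf-backup.tar',
         '/usr/hdp/current/oozie-server/conf/'),
        sudo = True,
        tries = 3,
        try_sleep = 1)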
+ 0 - 4
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py

@@ -91,7 +91,6 @@ class TestServiceCheck(RMFTestCase):
         dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
-        dfs_type = '',
         type = 'directory',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/examples',
@@ -106,7 +105,6 @@ class TestServiceCheck(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         owner = 'ambari-qa',
-        dfs_type = '',
         group = 'hadoop'
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
@@ -118,7 +116,6 @@ class TestServiceCheck(RMFTestCase):
         dfs_type = '',
         action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
-        dfs_type = '',
         type = 'directory',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/input-data',
@@ -133,7 +130,6 @@ class TestServiceCheck(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         owner = 'ambari-qa',
-        dfs_type = '',
         group = 'hadoop'
     )
     self.assertResourceCalled('HdfsResource', None,

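The removed lines each dropped a dfs_type keyword that was repeated within a single expected call. This is likely more than cosmetic: CPython rejects a literal repeated keyword argument at compile time, so a test module containing one cannot even be imported. A quick illustration:

# Compiling a call expression with a repeated keyword fails outright.
try:
    compile("f(dfs_type='', dfs_type='')", '<demo>', 'eval')
except SyntaxError as e:
    print(e)  # keyword argument repeated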
+ 11 - 0
ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json

@@ -358,6 +358,17 @@
         "type" : "hawq-site.xml"
         "type" : "hawq-site.xml"
       },
       },
       "dependencies" : [ ]
       "dependencies" : [ ]
+    }, {
+      "href" : "/api/v1/stacks/HDP/versions/2.3/services/HAWQ/configurations/default_segment_num",
+      "StackConfigurations" : {
+        "property_depends_on" : [ ],
+        "property_name" : "default_segment_num",
+        "service_name" : "HAWQ",
+        "stack_name" : "HDP",
+        "stack_version" : "2.3",
+        "type" : "hawq-site.xml"
+      },
+      "dependencies" : [ ]
     } ]
     } ]
   }, {
   }, {
     "href" : "/api/v1/stacks/HDP/versions/2.3/services/HDFS",
     "href" : "/api/v1/stacks/HDP/versions/2.3/services/HDFS",

+ 34 - 0
ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py

@@ -1316,6 +1316,40 @@ class TestHDP23StackAdvisor(TestCase):
     self.stackAdvisor.recommendTezConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations, expected)
 
+  def test_recommendHAWQConfigurations(self):
+
+    # original cluster data with 3 segments
+    services = self.load_json("services-normal-hawq-3-hosts.json")
+    componentsListList = [service["components"] for service in services["services"]]
+    componentsList = [item for sublist in componentsListList for item in sublist]
+    hawqSegmentComponent = [component["StackServiceComponents"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "HAWQSEGMENT"][0]
+    services["configurations"]["hawq-site"] = {"properties": {"default_segment_num": "24"}}
+
+    configurations = {}
+    clusterData = {}
+
+    # Test 1 - with 3 segments
+    self.assertEquals(len(hawqSegmentComponent["hostnames"]), 3)
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations["hawq-site"]["properties"]["default_segment_num"], str(3 * 6))
+
+    # Test 2 - with 49 segments
+    hawqSegmentComponent["hostnames"] = ["host" + str(i) for i in range(49)]
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations["hawq-site"]["properties"]["default_segment_num"], str(49 * 6))
+
+    # Test 3 - with 50 segments (threshold for new factor)
+    hawqSegmentComponent["hostnames"] = ["host" + str(i) for i in range(50)]
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations["hawq-site"]["properties"]["default_segment_num"], str(50 * 4))
+
+    # Test 4 - with no segments
+    configurations = {}
+    hawqSegmentComponent["hostnames"] = []
+    self.stackAdvisor.recommendHAWQConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, {'hawq-site': {'properties': {}}})
+
+
   def test_validateHiveConfigurations(self):
     properties = {"hive_security_authorization": "None",
                   "hive.exec.orc.default.stripe.size": "8388608",

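The expectations above pin down the recommendation rule for default_segment_num: six segments per HAWQSEGMENT host below 50 hosts, four per host at 50 or more, and no recommendation when there are no segment hosts. A sketch of that rule, with the multipliers inferred from the expected values (the stack advisor implementation itself is not shown in this diff):

def recommend_default_segment_num(segment_host_count):
    # Per-host factor drops from 6 to 4 once the cluster reaches 50 hosts.
    if segment_host_count == 0:
        return None  # leave hawq-site properties empty
    factor = 4 if segment_host_count >= 50 else 6
    return str(segment_host_count * factor)

assert recommend_default_segment_num(3) == '18'
assert recommend_default_segment_num(49) == '294'
assert recommend_default_segment_num(50) == '200'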
+ 12 - 2
ambari-server/src/test/python/unitTests.py

@@ -20,6 +20,7 @@ import unittest
 import multiprocessing
 import os
 import sys
+import traceback
 from Queue import Empty
 from random import shuffle
 import fnmatch
@@ -114,8 +115,17 @@ def stack_test_executor(base_folder, service, stack, custom_tests, executor_resu
   #TODO Add an option to randomize the tests' execution
   #shuffle(tests)
   modules = [os.path.basename(s)[:-3] for s in tests]
-  suites = [unittest.defaultTestLoader.loadTestsFromName(name) for name in
-    modules]
+  try:
+    suites = [unittest.defaultTestLoader.loadTestsFromName(name) for name in
+      modules]
+  except:
+    executor_result.put({'exit_code': 1,
+                         'tests_run': 0,
+                         'errors': [("Failed to load test files {0}".format(str(modules)), traceback.format_exc(), "ERROR")],
+                         'failures': []})
+    executor_result.put(1)
+    return
+
   testSuite = unittest.TestSuite(suites)
   textRunner = unittest.TextTestRunner(verbosity=2).run(testSuite)
 

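With this guard, a loader failure is reported through the same queue the worker uses for normal results: a summary dict first, then an exit code. A hypothetical sketch of the consuming side (names and timeouts assumed, not from unitTests.py):

from Queue import Empty  # Python 2, matching the imports above

def drain_result(executor_result):
    # The worker enqueues a summary dict followed by an exit code.
    try:
        summary = executor_result.get(timeout=5)
        exit_code = executor_result.get(timeout=5)
        return summary, exit_code
    except Empty:
        return None, 1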
+ 20 - 2
ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js

@@ -553,13 +553,15 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    * abort upgrade (in order to start Downgrade)
    */
   abortUpgrade: function () {
+    var errorCallback = this.get('isDowngrade') ? 'abortDowngradeErrorCallback' : 'abortUpgradeErrorCallback';
     return App.ajax.send({
       name: 'admin.upgrade.abort',
       sender: this,
       data: {
-        upgradeId: this.get('upgradeId')
+        upgradeId: this.get('upgradeId'),
+        isDowngrade: this.get('isDowngrade')
       },
-      error: 'abortUpgradeErrorCallback'
+      error: errorCallback
     });
   },
 
@@ -579,6 +581,22 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     App.showAlertPopup(header, body);
   },
 
+  /**
+   * error callback of <code>abortDowngrade()</code>
+   * @param {object} data
+   */
+  abortDowngradeErrorCallback: function (data) {
+    var header = Em.I18n.t('admin.stackDowngrade.state.paused.fail.header');
+    var body = Em.I18n.t('admin.stackDowngrade.state.paused.fail.body');
+    if(data && data.responseText){
+      try {
+        var json = $.parseJSON(data.responseText);
+        body = body + ' ' + json.message;
+      } catch (err) {}
+    }
+    App.showAlertPopup(header, body);
+  },
+
   retryUpgrade: function () {
     this.setProperties({
       requestInProgress: true,

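Threading isDowngrade through the request data matters because the abort endpoint now receives it in the body (see the admin.upgrade.abort format function in the ajax.js hunk below). A sketch of the resulting payload:

import json

def abort_body(is_downgrade):
    # Mirrors the format function added to 'admin.upgrade.abort' in ajax.js.
    return json.dumps({
        "RequestInfo": {"downgrade": is_downgrade},
        "Upgrade": {"request_status": "ABORTED"}
    })

print(abort_body(True))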
+ 41 - 0
ambari-web/app/controllers/main/alert_definitions_controller.js

@@ -28,12 +28,53 @@ App.MainAlertDefinitionsController = Em.ArrayController.extend({
    */
   showFilterConditionsFirstLoad: false,
 
+  contentUpdater: null,
+
   /**
    * List of all <code>App.AlertDefinition</code>
    * @type {App.AlertDefinition[]}
    */
   content: App.AlertDefinition.find(),
 
+  /**
+   * Generates key for alert summary that represents current state
+   */
+  getSummaryCache: function () {
+    var res = '';
+    this.get('content').forEach(function(o) {
+      var summary = o.get('summary');
+      o.get('order').forEach(function (state) {
+        res += summary[state] ? summary[state].count + summary[state].maintenanceCount : 0;
+      });
+    });
+
+    return res;
+   },
+
+  generateCacheByKey: function(key) {
+    if (key === 'summary') {
+      return this.getSummaryCache();
+    }
+
+    return this.get('content').mapProperty(key).join('');
+  },
+
+  contentWasChanged: function(key) {
+    var updatedCache = this.generateCacheByKey(key);
+    if (this.get('cache.' + key) !== updatedCache) {
+      this.set('cache.' + key, updatedCache);
+      this.propertyDidChange('contentUpdater');
+    }
+  },
+
+  cache: {
+    'label': '',
+    'summary': '',
+    'serviceName': '',
+    'lastTriggered': '',
+    'enabled': ''
+  },
+
   /**
    * Enable/disable alertDefinition confirmation popup
    * @param {object} event

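The cache added above throttles re-sorting: each sortable column gets a cheap fingerprint, and contentUpdater is bumped only when a fingerprint actually changes. The same idea sketched in Python (names illustrative):

def fingerprint(definitions, key):
    # Cheap per-column digest: concatenate the column's values.
    return ''.join(str(d.get(key, '')) for d in definitions)

cache = {}

def content_was_changed(definitions, key, notify):
    fp = fingerprint(definitions, key)
    if cache.get(key) != fp:
        cache[key] = fp
        notify()  # the Ember code calls propertyDidChange('contentUpdater')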
+ 17 - 13
ambari-web/app/controllers/main/host/combo_search_box.js

@@ -21,6 +21,7 @@ var App = require('app');
 App.MainHostComboSearchBoxController = Em.Controller.extend({
   name: 'mainHostComboSearchBoxController',
   currentSuggestion: [],
+  page_size: 10,
 
   VSCallbacks : {
     search: function (query, searchCollection) {
@@ -42,12 +43,12 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
     },
 
     facetMatches: function (callback) {
-      console.log('called');
       callback([
-        {label: 'name', category: 'Host'},
+        {label: 'host_name', category: 'Host'},
         {label: 'ip', category: 'Host'},
         {label: 'version', category: 'Host'},
         {label: 'health', category: 'Host'},
+        {label: 'rack', category: 'Host'},
         {label: 'service', category: 'Service'},
         {label: 'component', category: 'Service'},
         {label: 'state', category: 'Service'}
@@ -57,13 +58,11 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
     valueMatches: function (facet, searchTerm, callback) {
       var controller = App.router.get('mainHostComboSearchBoxController');
       switch (facet) {
-        case 'name':
-          controller.getHostPropertySuggestions('name', searchTerm).done(function() {
-            callback(controller.get('currentSuggestion'));
-          });
-          break;
+        case 'host_name':
         case 'ip':
-          callback(App.Host.find().toArray().mapProperty('ip'));
+          controller.getPropertySuggestions(facet, searchTerm).done(function() {
+            callback(controller.get('currentSuggestion'), {preserveMatches: true});
+          });
           break;
         case 'rack':
           callback(App.Host.find().toArray().mapProperty('rack').uniq());
@@ -104,18 +103,23 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
     }
   },
 
-  getHostPropertySuggestions: function(facet, searchTerm) {
+  getPropertySuggestions: function(facet, searchTerm) {
     return App.ajax.send({
-      name: 'hosts.all.install',
+      name: 'hosts.with_searchTerm',
       sender: this,
-      success: 'updateHostNameSuggestion',
+      data: {
+        facet: facet,
+        searchTerm: searchTerm,
+        page_size: this.get('page_size')
+      },
+      success: 'getPropertySuggestionsSuccess',
       error: 'commonSuggestionErrorCallback'
     });
   },
 
-  updateHostNameSuggestion: function(data) {
+  getPropertySuggestionsSuccess: function(data, opt, params) {
     this.updateSuggestion(data.items.map(function(item) {
-      return item.Hosts.host_name;
+      return item.Hosts[params.facet];
    }));
   },
 

+ 1 - 1
ambari-web/app/controllers/main/service/item.js

@@ -997,7 +997,7 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
       });
     } else if (dependentServices.length > 0) {
       this.dependentServicesWarning(serviceName, dependentServices);
-    } else if (App.Service.find(serviceName).get('workStatus') === 'INSTALLED') {
+    } else if (App.Service.allowUninstallStates.contains(App.Service.find(serviceName).get('workStatus'))) {
       App.showConfirmationPopup(
         function() {self.confirmDeleteService(serviceName)},
         Em.I18n.t('services.service.delete.popup.warning').format(displayName),

+ 1 - 1
ambari-web/app/controllers/wizard/step0_controller.js

@@ -73,12 +73,12 @@ App.WizardStep0Controller = Em.Controller.extend({
     if(App.router.nextBtnClickInProgress){
       return;
     }
-    App.router.nextBtnClickInProgress = true;
     this.set('hasSubmitted', true);
     if (!this.get('invalidClusterName')) {
       App.clusterStatus.set('clusterName', this.get('content.cluster.name'));
       this.set('content.cluster.status', 'PENDING');
       this.set('content.cluster.isCompleted', false);
+      App.router.nextBtnClickInProgress = true;
       App.router.send('next');
     }
   }

+ 2 - 1
ambari-web/app/controllers/wizard/step3_controller.js

@@ -1657,17 +1657,18 @@ App.WizardStep3Controller = Em.Controller.extend(App.ReloadPopupMixin, {
     if(App.router.nextBtnClickInProgress){
       return;
     }
-    App.router.nextBtnClickInProgress = true;
     if (this.get('isHostHaveWarnings')) {
       return App.showConfirmationPopup(
         function () {
           self.set('confirmedHosts', self.get('bootHosts'));
+          App.router.nextBtnClickInProgress = true;
           App.router.send('next');
         },
         Em.I18n.t('installer.step3.hostWarningsPopup.hostHasWarnings'));
     }
     else {
       this.set('confirmedHosts', this.get('bootHosts'));
+      App.router.nextBtnClickInProgress = true;
       App.router.send('next');
     }
     return null;

+ 1 - 1
ambari-web/app/controllers/wizard/step7_controller.js

@@ -1638,6 +1638,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
    * Proceed to the next step
    **/
   moveNext: function () {
+    App.router.nextBtnClickInProgress = true;
     App.router.send('next');
     this.set('submitButtonClicked', false);
   },
@@ -1651,7 +1652,6 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
     if (this.get('isSubmitDisabled') || App.router.nextBtnClickInProgress) {
       return false;
     }
-    App.router.nextBtnClickInProgress = true;
     var preInstallChecksController = App.router.get('preInstallChecksController');
     if (this.get('supportsPreInstallChecks')) {
       if (preInstallChecksController.get('preInstallChecksWhereRun')) {

+ 7 - 1
ambari-web/app/mappers/alert_definition_summary_mapper.js

@@ -56,6 +56,9 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
 
     alertDefinitions.forEach(function (d) {
       var id = d.get('id');
+      if ((alertDefinitionsMap[id].get('stateManager.currentState.name') !== 'saved')) {
+        alertDefinitionsMap[id].get('stateManager').transitionTo('saved');
+      }
       alertDefinitionsMap[id].setProperties(summaryMap[id]);
       if (!alertDefinitionsMap[id].get('enabled')) {
         // clear summary for disabled alert definitions
@@ -89,7 +92,10 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
         });
       }
     });
-
+    if (!$.mocho) {
+      //for some reasons this causing error in unit test
+      App.store.commit();
+    }
     console.timeEnd('App.alertDefinitionSummaryMapper execution time');
 
   }

+ 2 - 0
ambari-web/app/messages.js

@@ -1497,6 +1497,8 @@ Em.I18n.translations = {
   'admin.stackVersions.version.downgrade.suspended': "Downgrade: Paused",
   'admin.stackUpgrade.state.paused.fail.header': "Pause Upgrade failed",
   'admin.stackUpgrade.state.paused.fail.body': "Upgrade could not be paused. Try again later.",
+  'admin.stackDowngrade.state.paused.fail.header': "Pause Downgrade failed",
+  'admin.stackDowngrade.state.paused.fail.body': "Downgrade could not be paused. Try again later.",
 
   'admin.stackVersions.version.upgrade.upgradeOptions.header': "Upgrade Options",
   'admin.stackVersions.version.upgrade.upgradeOptions.bodyMsg.version': "You are about to perform an upgrade to <b>{0}</b>.",

+ 2 - 1
ambari-web/app/mixins/main/service/configs/config_overridable.js

@@ -139,7 +139,8 @@ App.ConfigOverridable = Em.Mixin.create({
             service_id: serviceId,
             service_name: serviceId,
             hosts: [],
-            desired_configs: []
+            desired_configs: [],
+            properties: []
           };
           App.store.load(App.ServiceConfigGroup, newConfigGroup);
           App.store.commit();

+ 1 - 0
ambari-web/app/models/alerts/alert_definition.js

@@ -345,3 +345,4 @@ App.AlertDefinition.FIXTURES = [];
 App.AlertReportDefinition.FIXTURES = [];
 App.AlertMetricsSourceDefinition.FIXTURES = [];
 App.AlertMetricsUriDefinition.FIXTURES = [];
+App.AlertDefinitionParameter.FIXTURES = [];

+ 2 - 1
ambari-web/app/models/host_component.js

@@ -269,6 +269,7 @@ App.HostComponentActionMap = {
     var NN = ctx.get('controller.content.hostComponents').findProperty('componentName', 'NAMENODE');
     var RM = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RESOURCEMANAGER');
     var RA = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RANGER_ADMIN');
+    var HM = ctx.get('controller.content.hostComponents').findProperty('componentName', 'HAWQMASTER');
     var HS = ctx.get('controller.content.hostComponents').findProperty('componentName', 'HAWQSTANDBY');
     return {
       RESTART_ALL: {
@@ -378,7 +379,7 @@ App.HostComponentActionMap = {
         context: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
         label: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
         cssClass: 'icon-stop',
-        disabled: false
+        disabled: !HM || HM.get('workStatus') != App.HostComponentStatus.started
       },
       IMMEDIATE_STOP: {
         customCommand: 'IMMEDIATE_STOP',

+ 43 - 0
ambari-web/app/models/service.js

@@ -158,6 +158,49 @@ App.Service = DS.Model.extend({
 
 });
 
+/**
+ * Map of all service states
+ *
+ * @type {Object}
+ */
+App.Service.statesMap = {
+  init: 'INIT',
+  installing: 'INSTALLING',
+  install_failed: 'INSTALL_FAILED',
+  stopped: 'INSTALLED',
+  starting: 'STARTING',
+  started: 'STARTED',
+  stopping: 'STOPPING',
+  uninstalling: 'UNINSTALLING',
+  uninstalled: 'UNINSTALLED',
+  wiping_out: 'WIPING_OUT',
+  upgrading: 'UPGRADING',
+  maintenance: 'MAINTENANCE',
+  unknown: 'UNKNOWN'
+};
+
+/**
+ * @type {String[]}
+ */
+App.Service.inProgressStates = [
+  App.Service.statesMap.installing,
+  App.Service.statesMap.starting,
+  App.Service.statesMap.stopping,
+  App.Service.statesMap.uninstalling,
+  App.Service.statesMap.upgrading,
+  App.Service.statesMap.wiping_out
+];
+
+/**
+ * @type {String[]}
+ */
+App.Service.allowUninstallStates = [
+  App.Service.statesMap.init,
+  App.Service.statesMap.install_failed,
+  App.Service.statesMap.stopped,
+  App.Service.statesMap.unknown
+];
+
 App.Service.Health = {
   live: "LIVE",
   dead: "DEAD-RED",

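The item.js hunk earlier in this diff swaps a strict workStatus === 'INSTALLED' check for membership in allowUninstallStates. The equivalent check, sketched with the four states listed above:

# States in which deleting a service is allowed, per App.Service.allowUninstallStates.
ALLOW_UNINSTALL = {'INIT', 'INSTALL_FAILED', 'INSTALLED', 'UNKNOWN'}

def can_delete(work_status):
    return work_status in ALLOW_UNINSTALL

assert can_delete('INSTALLED') and not can_delete('STARTED')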
+ 4 - 4
ambari-web/app/templates/main/admin/stack_upgrade/stack_upgrade_wizard.hbs

@@ -81,7 +81,7 @@
                     <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.failedItem target="controller"}}>{{t common.downgrade}}</button>
                     <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.failedItem target="controller"}}>{{t common.downgrade}}</button>
                   {{/if}}
                   {{/if}}
                   {{#if isDowngrade}}
                   {{#if isDowngrade}}
-                    <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                    <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
                   {{else}}
                   {{else}}
                     <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                     <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                   {{/if}}
                   {{/if}}
@@ -108,7 +108,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{/if}}
               {{#if isDowngrade}}
               {{#if isDowngrade}}
-                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
               {{/if}}
@@ -189,7 +189,7 @@
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
                 <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{/if}}
               {{#if isDowngrade}}
               {{#if isDowngrade}}
-                <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
               {{else}}
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                 <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
               {{/if}}
@@ -252,7 +252,7 @@
                   <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
                   <button class="btn btn-danger" {{bindAttr disabled="controller.requestInProgress"}} {{action confirmDowngrade view.manualItem target="controller"}}>{{t common.downgrade}}</button>
               {{/if}}
               {{/if}}
               {{#if isDowngrade}}
               {{#if isDowngrade}}
-                  <button class="btn" {{action closeWizard target="view.parentView"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
+                  <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseDowngrade}}</button>
               {{else}}
               {{else}}
                   <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
                   <button class="btn" {{action pauseUpgrade target="view"}}>{{t admin.stackUpgrade.pauseUpgrade}}</button>
               {{/if}}
               {{/if}}

+ 4 - 28
ambari-web/app/templates/main/alerts.hbs

@@ -57,39 +57,15 @@
               <span {{bindAttr title="alertDefinition.type"}} {{bindAttr class=":type-icon  alertDefinition.typeIconClass"}}></span>
               <a href="#" {{action "gotoAlertDetails" alertDefinition}}>{{alertDefinition.label}}</a>
             </td>
-            <td class="alert-status">{{{alertDefinition.status}}}</td>
+            <td class="alert-status">
+              {{view App.AlertDefinitionSummary contentBinding="alertDefinition"}}
+            </td>
             <td class="alert-service">{{alertDefinition.serviceDisplayName}}</td>
             <td class="alert-time">
               <time class="timeago" {{bindAttr data-original-title="alertDefinition.lastTriggeredFormatted"}}>{{alertDefinition.lastTriggeredAgoFormatted}}</time>
             </td>
             <td class="last toggle-state-button alert-state">
-              {{#if alertDefinition.enabled not=true}}
-                {{#isAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                  <a href="#" {{action "toggleState" alertDefinition target="controller"}} {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                  <span class="enable-disable-button" {{bindAttr data-original-title="view.enabledTooltip"}}>
-                    {{view.enabledDisplay}}
-                  </span>
-                  </a>
-                {{/isAuthorized}}
-                {{#isNotAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                    <span {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                      {{view.enabledDisplay}}
-                    </span>
-                {{/isNotAuthorized}}
-              {{else}}
-                {{#isAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                  <a href="#" {{action "toggleState" alertDefinition target="controller"}} {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                  <span class="enable-disable-button" {{bindAttr data-original-title="view.disabledTooltip"}}>
-                    {{view.disabledDisplay}}
-                  </span>
-                  </a>
-                {{/isAuthorized}}
-                {{#isNotAuthorized "CLUSTER.TOGGLE_ALERTS"}}
-                  <span {{bindAttr class="alertDefinition.enabled:alert-definition-enable:alert-definition-disable"}}>
-                    {{view.disabledDisplay}}
-                  </span>
-                {{/isNotAuthorized}}
-              {{/if}}
+              {{view App.AlertDefinitionState contentBinding="alertDefinition"}}
             </td>
           </tr>
         {{/each}}

+ 31 - 0
ambari-web/app/templates/main/alerts/alert_definition/alert_definition_state.hbs

@@ -0,0 +1,31 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#isAuthorized "CLUSTER.TOGGLE_ALERTS"}}
+  <a href="#" {{action "toggleState" view.content target="controller"}}
+    {{bindAttr class="view.content.enabled:alert-definition-enable:alert-definition-disable"}}>
+      <span class="enable-disable-button" {{bindAttr data-original-title="view.tooltipText"}}>
+        {{view.labelText}}
+      </span>
+  </a>
+{{/isAuthorized}}
+{{#isNotAuthorized "CLUSTER.TOGGLE_ALERTS"}}
+  <span {{bindAttr class="view.content.enabled:alert-definition-enable:alert-definition-disable"}}>
+    {{view.labelText}}
+  </span>
+{{/isNotAuthorized}}

+ 28 - 0
ambari-web/app/templates/main/alerts/alert_definition/alert_definition_summary.hbs

@@ -0,0 +1,28 @@
+{{!
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+}}
+
+{{#if view.hostCount}}
+  {{#each state in view.states}}
+    <span {{bindAttr class=":alert-state-single-host :label state.stateClass"}}>
+      {{#if state.isMaintenance}}<span class="icon-medkit"></span>{{/if}}
+      {{state.shortStateWithCounter}}
+    </span>
+  {{/each}}
+{{else}}
+  <span class="alert-state-single-host label alert-state-PENDING">NONE</span>
+{{/if}}

+ 19 - 0
ambari-web/app/utils/ajax/ajax.js

@@ -1578,6 +1578,7 @@ var urls = {
     'Upgrade/progress_percent,Upgrade/request_context,Upgrade/request_status,Upgrade/direction,Upgrade/downgrade_allowed,' +
     'upgrade_groups/UpgradeGroup,' +
     'upgrade_groups/upgrade_items/UpgradeItem/status,' +
+    'upgrade_groups/upgrade_items/UpgradeItem/display_status,' +
     'upgrade_groups/upgrade_items/UpgradeItem/context,' +
     'upgrade_groups/upgrade_items/UpgradeItem/group_id,' +
     'upgrade_groups/upgrade_items/UpgradeItem/progress_percent,' +
@@ -1665,6 +1666,9 @@ var urls = {
     'format': function (data) {
       return {
         data: JSON.stringify({
+          "RequestInfo": {
+            "downgrade": data.isDowngrade
+          },
           "Upgrade": {
             "request_status": "ABORTED"
           }
@@ -2357,6 +2361,21 @@ var urls = {
     'real': '/clusters/{clusterName}/hosts?fields=Hosts/cpu_count,Hosts/disk_info,Hosts/total_mem,Hosts/os_type,Hosts/os_arch,Hosts/ip,host_components/HostRoles/state&minimal_response=true',
     'mock': '/data/hosts/HDP2/hosts.json'
   },
+  'hosts.with_searchTerm': {
+    'real': '/clusters/{clusterName}/hosts?fields=Hosts/{facet}&minimal_response=true&page_size={page_size}',
+    'mock': '',
+    format: function (data) {
+      return {
+        headers: {
+          'X-Http-Method-Override': 'GET'
+        },
+        type: 'POST',
+        data: JSON.stringify({
+          "RequestInfo": {"query": (data.searchTerm ? "Hosts/"+ data.facet +".matches(.*" + data.searchTerm + ".*)" : "")}
+        })
+      };
+    }
+  },
   'host_components.all': {
     'real': '/clusters/{clusterName}/host_components?fields=HostRoles/host_name&minimal_response=true',
     'mock': ''

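hosts.with_searchTerm tunnels a GET through a POST via X-Http-Method-Override so that long matches() predicates stay out of the URL. A sketch of the equivalent request; the base URL, cluster name, and use of the requests library are assumptions for illustration:

import json
import requests

def fetch_suggestions(base_url, cluster, facet, term, page_size=10):
    url = ('%s/clusters/%s/hosts?fields=Hosts/%s'
           '&minimal_response=true&page_size=%d') % (base_url, cluster, facet, page_size)
    query = "Hosts/%s.matches(.*%s.*)" % (facet, term) if term else ""
    return requests.post(url,
                         headers={'X-Http-Method-Override': 'GET'},
                         data=json.dumps({"RequestInfo": {"query": query}}))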
+ 14 - 0
ambari-web/app/utils/ember_reopen.js

@@ -224,6 +224,20 @@ Em.View.reopen({
   }
 });
 
+Ember._HandlebarsBoundView.reopen({
+  /**
+   * overwritten set method of Ember._HandlebarsBoundView to avoid uncaught errors
+   * when trying to set property of destroyed view
+   */
+  render: function(buffer){
+    if(!this.get('isDestroyed') && !this.get('isDestroying')){
+      this._super(buffer);
+    } else {
+      console.debug('Calling set on destroyed view');
+    }
+  }
+});
+
 Ember.TextArea.reopen({
   attributeBindings: ['readonly']
 });

+ 1 - 1
ambari-web/app/utils/validator.js

@@ -76,7 +76,7 @@ module.exports = {
    * @returns {boolean}
    */
   isValidDataNodeDir: function(value) {
-    var dirRegex = /^(\[[0-9a-zA-Z]+\])?(\/[0-9a-z]*)/;
+    var dirRegex = /^(\[[0-9a-zA-Z]+\])?(file:\/\/)?(\/[0-9a-z]*)/;
     var winRegex = /^(\[[0-9a-zA-Z]+\])?[a-zA-Z]:\\[0-9a-zA-Z]*/;
     var winUrlRegex = /^(\[[0-9a-zA-Z]+\])?file:\/\/\/[a-zA-Z]:\/[0-9a-zA-Z]*/;
     var dirs = value.split(',');

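The widened dirRegex now also accepts a file:// scheme in front of an absolute path. The same pattern exercised in Python's re syntax:

import re

dir_regex = re.compile(r'^(\[[0-9a-zA-Z]+\])?(file://)?(/[0-9a-z]*)')
for d in ('/hadoop/data', 'file:///hadoop/data', '[SSD]/hadoop/data'):
    assert dir_regex.match(d), d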
+ 2 - 0
ambari-web/app/views.js

@@ -95,6 +95,8 @@ require('views/login');
 require('views/main');
 require('views/main/menu');
 require('views/main/alert_definitions_view');
+require('views/main/alerts/alert_definition/alert_definition_summary');
+require('views/main/alerts/alert_definition/alert_definition_state');
 require('views/main/alerts/definition_details_view');
 require('views/main/alerts/alert_definitions_actions_view');
 require('views/main/alerts/definition_configs_view');

+ 3 - 0
ambari-web/app/views/common/quick_view_link_view.js

@@ -538,6 +538,9 @@ App.QuickViewLinks = Em.View.extend({
       case "ATLAS":
       case "ATLAS":
         hosts = this.findHosts('ATLAS_SERVER', response);
         hosts = this.findHosts('ATLAS_SERVER', response);
         break;
         break;
+      case "MAPREDUCE2":
+        hosts = this.findHosts('HISTORYSERVER', response);
+        break;
       default:
       default:
         if (this.getWithDefault('content.hostComponents', []).someProperty('isMaster')) {
         if (this.getWithDefault('content.hostComponents', []).someProperty('isMaster')) {
           hosts = this.findHosts(this.get('content.hostComponents').findProperty('isMaster').get('componentName'), response);
           hosts = this.findHosts(this.get('content.hostComponents').findProperty('isMaster').get('componentName'), response);

+ 16 - 3
ambari-web/app/views/common/sort_view.js

@@ -119,7 +119,7 @@ var wrapperView = Em.View.extend({
         }
       }, this);
     }
-  }.observes('content.length'),
+  }.observes('controller.contentUpdater'),
 
   /**
    * reset all sorts fields
@@ -185,12 +185,25 @@ var wrapperView = Em.View.extend({
     return func;
   },
 
+  /**
+   * method that runs <code>contentWasChanged<code>
+   *
+   * @method onContentChangeOnce
+   */
+  onContentChangeOnce: function() {
+    var keys = arguments[1].match(/[a-zA-Z]+$/),
+      key = keys.length ? keys[0] : null;
+    if (key) {
+      Em.run.once(this.get('controller'), 'contentWasChanged', key);
+    }
+  },
+
   /**
    * Add observer for key to call  <code>onContentChange</code>
    * @param key
    */
   addSortingObserver: function (key) {
-    this.addObserver('content.@each.' + key, this, 'onContentChange');
+    this.addObserver('controller.content.@each.' + key, this, 'onContentChangeOnce');
   },
 
   /**
@@ -198,7 +211,7 @@ var wrapperView = Em.View.extend({
    * @param key
    */
   removeSortingObserver: function (key) {
-    this.removeObserver('content.@each.' + key, this, 'onContentChange');
+    this.removeObserver('controller.content.@each.' + key, this, 'onContentChangeOnce');
   },
 
   willDestroyElement: function () {

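onContentChangeOnce recovers the column key from the tail of the observed path ('controller.content.@each.label' yields 'label') and coalesces bursts of notifications with Em.run.once. The extraction step, restated in Python:

import re

def key_from_path(path):
    m = re.search(r'[a-zA-Z]+$', path)
    return m.group(0) if m else None

assert key_from_path('controller.content.@each.label') == 'label'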
+ 18 - 6
ambari-web/app/views/common/table_view.js

@@ -359,9 +359,23 @@ App.TableView = Em.View.extend(App.UserPref, {
       };
       this.get('filterConditions').push(filterCondition);
     }
+
+    this.saveAllFilterConditions();
+  },
+
+  /**
+   * Save not empty <code>filterConditions</code> to the localStorage
+   *
+   * @method saveAllFilterConditions
+   */
+  saveAllFilterConditions: function () {
+    var filterConditions = this.get('filterConditions');
     // remove empty entries
-    this.set('filterConditions', this.get('filterConditions').filter(function(item){ return !Em.isEmpty(item.value); }));
-    App.db.setFilterConditions(this.get('controller.name'), this.get('filterConditions'));
+    filterConditions = filterConditions.filter(function(item) {
+      return !Em.isEmpty(item.value);
+    });
+    this.set('filterConditions', filterConditions);
+    App.db.setFilterConditions(this.get('controller.name'), filterConditions);
   },
 
   saveDisplayLength: function() {
@@ -487,10 +501,8 @@ App.TableView = Em.View.extend(App.UserPref, {
    */
   clearFilters: function() {
     this.set('filterConditions', []);
-    this.get('_childViews').forEach(function(childView) {
-      if (childView['clearFilter']) {
-        childView.clearFilter();
-      }
+    this.get('childViews').forEach(function(childView) {
+      Em.tryInvoke(childView, 'clearFilter');
     });
   }
 

+ 2 - 21
ambari-web/app/views/main/alert_definitions_view.js

@@ -52,6 +52,7 @@ App.MainAlertDefinitionsView = App.TableView.extend({
   },
 
   willDestroyElement: function () {
+    $(".timeago").tooltip('destroy');
     this.removeObserver('pageContent.length', this, 'tooltipsUpdater');
   },
 
@@ -78,26 +79,6 @@ App.MainAlertDefinitionsView = App.TableView.extend({
 
   colPropAssoc: ['', 'label', 'summary', 'serviceName', 'type', 'lastTriggered', 'enabled', 'groups'],
 
-  /**
-   * @type {string}
-   */
-  enabledTooltip: Em.I18n.t('alerts.table.state.enabled.tooltip'),
-
-  /**
-   * @type {string}
-   */
-  disabledTooltip: Em.I18n.t('alerts.table.state.disabled.tooltip'),
-
-  /**
-   * @type {string}
-   */
-  enabledDisplay: Em.I18n.t('alerts.table.state.enabled'),
-
-  /**
-   * @type {string}
-   */
-  disabledDisplay: Em.I18n.t('alerts.table.state.disabled'),
-
   sortView: sort.wrapperView.extend({
     didInsertElement: function () {
       this._super();
@@ -500,7 +481,7 @@ App.MainAlertDefinitionsView = App.TableView.extend({
    */
   tooltipsUpdater: function () {
     Em.run.next(this, function () {
-      App.tooltip($(".enable-disable-button, .timeago"));
+      App.tooltip($(".timeago"));
     });
   },
 

+ 34 - 0
ambari-web/app/views/main/alerts/alert_definition/alert_definition_state.js

@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var App = require('app');
+
+App.AlertDefinitionState = Em.View.extend({
+
+  templateName: require('templates/main/alerts/alert_definition/alert_definition_state'),
+
+  labelText: Em.computed.ifThenElse('content.enabled', Em.I18n.t('alerts.table.state.enabled'), Em.I18n.t('alerts.table.state.disabled')),
+
+  tooltipText: Em.computed.ifThenElse('content.enabled', Em.I18n.t('alerts.table.state.enabled.tooltip'), Em.I18n.t('alerts.table.state.disabled.tooltip')),
+
+  didInsertElement: function () {
+    App.tooltip(this.$(".enable-disable-button"));
+  },
+  willDestroyElement:function () {
+    this.$(".enable-disable-button").tooltip('destroy');
+  }
+});

+ 65 - 0
ambari-web/app/views/main/alerts/alert_definition/alert_definition_summary.js

@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var App = require('app');
+
+App.AlertDefinitionSummary = Em.View.extend({
+
+  templateName: require('templates/main/alerts/alert_definition/alert_definition_summary'),
+
+  didInsertElement: function() {
+    this.stateObserver();
+  },
+
+  hostCount: 0,
+  states: [],
+
+  stateObserver: function () {
+    var order = this.get('content.order'),
+      summary = this.get('content.summary'),
+      shortState = this.get('content.shortState');
+
+    var hostCnt = 0;
+    order.forEach(function (state) {
+      hostCnt += summary[state] ? summary[state].count + summary[state].maintenanceCount : 0;
+    });
+    var states = [];
+    if (hostCnt) {
+      order.forEach(function (state) {
+        if (summary[state]) {
+          if (summary[state].count) {
+            states.push({
+              'shortStateWithCounter': shortState[state] + (summary[state].count > 1 ? ' (' + summary[state].count + ')' : ''),
+              'isMaintenance': false,
+              'stateClass': 'alert-state-' + state
+            });
+          }
+          if (summary[state].maintenanceCount) {
+            states.push({
+              'shortStateWithCounter': shortState[state] + (summary[state].maintenanceCount > 1 ? ' (' + summary[state].maintenanceCount + ')' : ''),
+              'isMaintenance': true,
+              'stateClass': 'alert-state-PENDING'
+            });
+          }
+        }
+      }, this);
+    }
+    this.set('hostCount', hostCnt);
+    this.set('states', states);
+  }.observes('content.summary')
+
+});

+ 12 - 0
ambari-web/app/views/main/dashboard/config_history_view.js

@@ -207,6 +207,18 @@ App.MainConfigHistoryView = App.TableView.extend(App.TableServerViewMixin, {
     });
   },
 
+  /**
+   * Clear all filter values, update filter conditions in the localStorage and update table data with API-request
+   *
+   * @method clearFilters
+   * @override
+   */
+  clearFilters: function () {
+    this._super();
+    this.saveAllFilterConditions();
+    this.refresh();
+  },
+
   /**
    * callback executed after refresh call done
    * @method refreshDone

Some files were not shown because too many files changed in this diff