Browse Source

Merge branch 'trunk' into branch-alerts-dev

Jonathan Hurley 11 years ago
parent
commit
05da121a58
100 changed files with 3776 additions and 1402 deletions
  1. 2 0
      ambari-admin/src/main/resources/ui/admin-web/app/index.html
  2. 1 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js
  3. 8 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
  4. 8 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/ClustersManageAccessCtrl.js
  5. 9 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
  6. 13 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js
  7. 199 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/directives/editableList.js
  8. 6 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Group.js
  9. 7 0
      ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/User.js
  10. 210 2
      ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
  11. 2 27
      ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
  12. 5 26
      ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html
  13. 44 0
      ambari-admin/src/main/resources/ui/admin-web/app/views/directives/editableList.html
  14. 1 22
      ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
  15. 4 4
      ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
  16. 1 21
      ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
  17. 2 1
      ambari-admin/src/main/resources/ui/admin-web/bower.json
  18. 16 7
      ambari-common/src/main/python/resource_management/libraries/script/script.py
  19. 3 0
      ambari-server/conf/unix/ambari.properties
  20. 0 1
      ambari-server/src/main/java/org/apache/ambari/server/api/AmbariPersistFilter.java
  21. 3 3
      ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
  22. 27 0
      ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequestException.java
  23. 25 9
      ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
  24. 5 9
      ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java
  25. 46 12
      ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
  26. 38 5
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
  27. 5 11
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java
  28. 13 21
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
  29. 40 12
      ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
  30. 43 26
      ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
  31. 20 2
      ambari-server/src/main/java/org/apache/ambari/server/controller/FailsafeServletResponse.java
  32. 13 9
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ControllerResourceProvider.java
  33. 5 1
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java
  34. 7 2
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java
  35. 7 2
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java
  36. 32 12
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
  37. 6 2
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java
  38. 24 4
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java
  39. 3 3
      ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java
  40. 28 2
      ambari-server/src/main/java/org/apache/ambari/server/orm/dao/GroupDAO.java
  41. 32 2
      ambari-server/src/main/java/org/apache/ambari/server/orm/dao/MemberDAO.java
  42. 20 7
      ambari-server/src/main/java/org/apache/ambari/server/orm/dao/PrincipalDAO.java
  43. 28 1
      ambari-server/src/main/java/org/apache/ambari/server/orm/dao/PrincipalTypeDAO.java
  44. 38 11
      ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UserDAO.java
  45. 6 1
      ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java
  46. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java
  47. 3 2
      ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java
  48. 9 0
      ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java
  49. 89 0
      ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java
  50. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java
  51. 26 9
      ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
  52. 1 1
      ambari-server/src/main/java/org/apache/ambari/server/security/authorization/User.java
  53. 156 2
      ambari-server/src/main/java/org/apache/ambari/server/security/authorization/Users.java
  54. 127 106
      ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
  55. 67 0
      ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapBatchDto.java
  56. 113 0
      ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapGroupDto.java
  57. 72 0
      ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapSyncDto.java
  58. 133 0
      ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapUserDto.java
  59. 82 0
      ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapUserGroupMemberDto.java
  60. 2 1
      ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
  61. 4 0
      ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
  62. 3 3
      ambari-server/src/main/python/ambari-server.py
  63. 4 0
      ambari-server/src/main/resources/META-INF/persistence.xml
  64. 6 2
      ambari-server/src/main/resources/scripts/stack_advisor.py
  65. 4 19
      ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
  66. 6 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
  67. 15 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
  68. 20 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/mapred-env.xml
  69. 7 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/client.py
  70. 13 0
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
  71. 1 1
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
  72. 11 8
      ambari-server/src/main/resources/stacks/HDP/1.3.2/services/stack_advisor.py
  73. 1 1
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py
  74. 50 68
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
  75. 6 3
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  76. 20 17
      ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
  77. 4 4
      ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
  78. 2 2
      ambari-server/src/main/resources/stacks/HDP/2.2.1/repos/repoinfo.xml
  79. 29 15
      ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunnerTest.java
  80. 15 14
      ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
  81. 123 0
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java
  82. 133 0
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java
  83. 3 5
      ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java
  84. 17 0
      ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java
  85. 16 0
      ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewInstanceEntityTest.java
  86. 9 68
      ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
  87. 94 0
      ambari-server/src/test/java/org/apache/ambari/server/security/ldap/LdapPerformanceTest.java
  88. 39 0
      ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
  89. 59 0
      ambari-server/src/test/python/stacks/1.3.2/HDFS/test_hdfs_client.py
  90. 35 1
      ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py
  91. 7 3
      ambari-server/src/test/python/stacks/1.3.2/configs/default.json
  92. 160 126
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
  93. 217 196
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
  94. 255 249
      ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
  95. 156 1
      ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
  96. 159 145
      ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
  97. 132 0
      ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
  98. 2 1
      ambari-web/app/app.js
  99. 2 4
      ambari-web/app/assets/licenses/NOTICE.txt
  100. 0 42
      ambari-web/app/controllers/global/cluster_controller.js

+ 2 - 0
ambari-admin/src/main/resources/ui/admin-web/app/index.html

@@ -85,6 +85,7 @@
     <script src="bower_components/jquery/dist/jquery.js"></script>
     <script src="bower_components/bootstrap/dist/js/bootstrap.js"></script>
     <script src="bower_components/angular/angular.js"></script>
+    <script src="bower_components/angular-animate/angular-animate.js"></script>
     <script src="bower_components/angular-route/angular-route.js"></script>
     <script src="bower_components/angular-bootstrap/ui-bootstrap-tpls.js"></script>
     <script src="bower_components/lodash/dist/lodash.compat.js"></script>
@@ -126,6 +127,7 @@
     <script src="scripts/directives/linkToDir.js"></script>
     <script src="scripts/directives/PasswordVerify.js"></script>
     <script src="scripts/directives/disabledTooltip.js"></script>
+    <script src="scripts/directives/editableList.js"></script>
     <script src="scripts/services/User.js"></script>
     <script src="scripts/services/Group.js"></script>
     <script src="scripts/services/View.js"></script>

+ 1 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/app.js

@@ -19,6 +19,7 @@
 
 angular.module('ambariAdminConsole', [
   'ngRoute',
+  'ngAnimate',
   'ui.bootstrap',
   'restangular',
   'angularAlert',

+ 8 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js

@@ -160,6 +160,14 @@ angular.module('ambariAdminConsole')
     $scope.editPermissionDisabled = true;
   };
 
+  $scope.$watch(function() {
+    return $scope.permissionsEdit;
+  }, function(newValue) {
+    if(newValue){
+      $scope.savePermissions();
+    }
+  }, true);  
+
   $scope.deleteInstance = function(instance) {
     ConfirmationModal.show('Delete View Instance', 'Are you sure you want to delete View Instance '+ instance.ViewInstanceInfo.label +'?').then(function() {
       View.deleteInstance(instance.ViewInstanceInfo.view_name, instance.ViewInstanceInfo.version, instance.ViewInstanceInfo.instance_name)

+ 8 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/clusters/ClustersManageAccessCtrl.js

@@ -61,4 +61,12 @@ angular.module('ambariAdminConsole')
     });
     $scope.isEditMode = false;
   };
+
+  $scope.$watch(function() {
+    return $scope.permissionsEdit;
+  }, function(newValue) {
+    if(newValue){
+      $scope.save();
+    }
+  }, true);
 }]);

+ 9 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js

@@ -26,6 +26,14 @@ angular.module('ambariAdminConsole')
   $scope.dataLoaded = false;
   
   $scope.isMembersEditing = false;
+
+  $scope.$watch(function() {
+    return $scope.group.editingUsers;
+  }, function(newValue) {
+    if(newValue && !angular.equals(newValue, $scope.groupMembers)){
+      $scope.updateMembers();  
+    }
+  }, true);
   
   $scope.enableMembersEditing = function() {
     $scope.isMembersEditing = true;
@@ -49,6 +57,7 @@ angular.module('ambariAdminConsole')
   function loadMembers(){
     $scope.group.getMembers().then(function(members) {
       $scope.groupMembers = members;
+      $scope.group.editingUsers = angular.copy($scope.groupMembers);
     });
   }    
   

+ 13 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersShowCtrl.js

@@ -24,6 +24,7 @@ angular.module('ambariAdminConsole')
     User.get($routeParams.id).then(function(data) {
       $scope.user = data.Users;
       $scope.isCurrentUser = $scope.user.user_name === Auth.getCurrentUser();
+      $scope.editingGroupsList = angular.copy($scope.user.groups);
     });
   }
 
@@ -38,6 +39,18 @@ angular.module('ambariAdminConsole')
     $scope.editingGroupsList = angular.copy($scope.user.groups);
   };
 
+  $scope.$watch(function() {
+    return $scope.editingGroupsList;
+  }, function(newValue) {
+    if(newValue){
+      if( !angular.equals(newValue, $scope.user.groups) ){
+        console.log('Update!');
+        $scope.updateGroups();
+      }
+        
+    }
+  }, true);
+
   $scope.updateGroups = function() {
     var groups = $scope.editingGroupsList.toString().split(',').filter(function(item) {return item.trim();}).map(function(item) {return item.trim()});
     var diff = getDifference($scope.user.groups, groups);

+ 199 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/directives/editableList.js

@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+'use strict';
+
+angular.module('ambariAdminConsole')
+.directive('editableList', ['$q', '$document', function($q, $document) {
+  return {
+    restrict: 'E',
+    templateUrl: 'views/directives/editableList.html',
+    scope: {
+      itemsSource: '=',
+      resourceType: '@',
+      editable: '='
+    },
+    link: function($scope, $elem, $attr, $ctrl) {
+      var $editBox = $elem.find('[contenteditable]');
+
+      var readInput = function() {
+        $scope.$apply(function() {
+          $scope.input = $editBox.html();
+        });
+      };
+
+      $scope.$watch(function() {
+        return $scope.input;
+      }, function(newValue) {
+        if(newValue === ''){
+          $scope.clearInput();
+        }
+      });
+
+      $scope.clearInput = function() {
+        $editBox.html('').blur();
+      };
+
+      $scope.focusOnInput = function() {
+        setTimeout(function() {
+          var elem = $editBox[0];
+          var selection = window.getSelection(),
+              range = document.createRange();
+
+          elem.innerHTML = '\u00a0';
+          range.selectNodeContents(elem);
+          selection.removeAllRanges();
+          selection.addRange(range);
+          document.execCommand('delete', false, null);
+        }, 0);
+      };
+
+      $editBox.on('input', readInput);
+      $editBox.on('keydown', function(e) {
+        switch(e.which){
+          case 27: // ESC
+            $editBox.html('').blur();
+            readInput();
+            break;
+          case 13: // Enter
+            $scope.$apply(function() {
+              $scope.addItem();
+            });
+            return false;
+            break;
+          case 40: // Down arrow
+            $scope.downArrowHandler();
+            break;
+          case 38: // Up arrow
+            $scope.upArrowHandler();
+            break;
+        }
+      });
+    },
+    controller: ['$scope', '$injector', function($scope, $injector) {
+      var $resource = $injector.get($scope.resourceType);
+
+      $scope.identity = angular.identity; // Sorting function
+
+      $scope.items = angular.copy($scope.itemsSource);
+      $scope.editMode = false;
+      $scope.input = '';
+      $scope.typeahead = [];
+      $scope.selectedTypeahed = 0;
+
+      // Watch source of items
+      $scope.$watch(function() {
+        return $scope.itemsSource;
+      }, function(newValue) {
+        $scope.items = angular.copy($scope.itemsSource);
+      }, true);
+
+      // When input has changed - load typeahead items
+      $scope.$watch(function() {
+        return $scope.input;
+      }, function(newValue) {
+        if(newValue){
+          var newValue = newValue.split(',').filter(function(i){ 
+            i = i.replace('&nbsp;', ''); // Sanitize from spaces
+            return !!i.trim();
+          });
+          if( newValue.length > 1){
+            // If someone pastes a comma-separated string, then just add all items to the list
+            angular.forEach(newValue, function(item) {
+              $scope.addItem(item);
+            });
+            $scope.clearInput();
+            
+          } else {
+            // Load typeahead items based on current input
+            $resource.listByName(newValue).then(function(data) {
+              var items = [];
+              angular.forEach(data.data.items, function(item) {
+                var name;
+                if($scope.resourceType === 'User'){
+                  name = item.Users.user_name;
+                } else if($scope.resourceType === 'Group'){
+                  name = item.Groups.group_name;
+                }
+
+                if($scope.items.indexOf(name) < 0){ // Only if item not in list
+                  items.push(name);
+                }
+                $scope.typeahead = items.slice(0, 5);
+                $scope.selectedTypeahed = 0;
+              });
+            });
+          }
+
+            
+        } else {
+          $scope.typeahead = [];
+          $scope.selectedTypeahed = 0;
+        }
+      });
+
+      $scope.enableEditMode = function() {
+        if( $scope.editable && !$scope.editMode){
+          $scope.editMode = true;
+          if( $scope.items.length === 0){
+            $scope.focusOnInput();
+          }
+        }
+      };
+
+      $scope.cancel = function(event) {
+        $scope.editMode = false;
+        $scope.items = angular.copy($scope.itemsSource);
+        $scope.input = '';
+        event.stopPropagation();
+      };
+      $scope.save = function(event) {
+        $scope.itemsSource = $scope.items;
+        $scope.editMode = false;
+        $scope.input = '';
+        event.stopPropagation();
+      };
+
+
+      $scope.downArrowHandler = function() {
+        $scope.$apply(function() {
+          $scope.selectedTypeahed = ($scope.selectedTypeahed+1) % $scope.typeahead.length;
+        });
+      };
+      $scope.upArrowHandler = function() {
+        $scope.$apply(function() {
+          $scope.selectedTypeahed -= 1;
+          $scope.selectedTypeahed = $scope.selectedTypeahed < 0 ? $scope.typeahead.length-1 : $scope.selectedTypeahed;
+        });
+      };
+
+      $scope.addItem = function(item) {
+        item = item ? item : $scope.typeahead.length ? $scope.typeahead[$scope.selectedTypeahed] : $scope.input;
+        
+        if(item && $scope.items.indexOf(item) < 0){
+          $scope.items.push(item);
+          $scope.input = '';
+        }
+      };
+
+      $scope.removeFromItems = function(item) {
+        $scope.items.splice( $scope.items.indexOf(item), 1);
+      };
+    }]
+  };
+}]);
+

+ 6 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Group.js

@@ -174,6 +174,12 @@ angular.module('ambariAdminConsole')
     return deferred.promise;
   };
 
+  Group.listByName = function(name) {
+    return $http.get(Settings.baseUrl + '/groups?'
+      + 'Groups/group_name.matches(.*'+name+'.*)'
+    );
+  };
+
   Group.getPrivilegies = function(groupId) {
     return $http.get(Settings.baseUrl + '/privileges', {
         params:{

+ 7 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/User.js

@@ -46,6 +46,13 @@ angular.module('ambariAdminConsole')
         + (params.admin ? '&Users/admin=true' : '')
       );
     },
+    listByName: function(name) {
+      return $http.get(
+        Settings.baseUrl + '/users?'
+        + 'Users/user_name.matches(.*'+name+'.*)'
+        + '&from=0&page_size=20'
+      );
+    },
     get: function(userId) {
       return Restangular.one('users', userId).get();
     },

+ 210 - 2
ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css

@@ -16,6 +16,182 @@
  * limitations under the License.
  */
 
+
+
+/*
+  ------ START editable-list DIRECTIVE SECTION ------ -
+*/
+.editable-list-container.well{
+  padding: 10px;
+  position: relative;
+  margin-bottom: 30px;
+  cursor: pointer;
+}
+.editable-list-container.well.edit-mode{
+  cursor: default;
+}
+.editable-list-container.well.disabled{
+  background: white;
+}
+
+.editable-list-container .items-box{
+
+}
+.editable-list-container .items-box ul.items-list{
+  list-style-type: none;
+  margin: 0;
+  padding: 0;
+}
+
+.editable-list-container .items-box ul.items-list li.item{
+  display: inline-block;
+  padding: 4px 8px;
+  margin: 0 5px 5px 2px;
+  background: white;
+  border: 1px solid #ebebeb;
+  max-width: 100%;
+  white-space: nowrap;
+  position: relative;
+}
+
+.editable-list-container.edit-mode .items-box ul.items-list li.item{
+  padding-right: 25px;
+}
+
+.editable-list-container .items-box ul.items-list li.item.ng-leave-active{
+  display: none;
+}
+.editable-list-container .items-box ul.items-list li a{
+  text-decoration: none;
+}
+
+.editable-list-container .items-box ul.items-list li.item .close{
+  margin: -2px 0 0 5px;
+  width: 13px;
+  outline: none;
+  position: absolute;
+  display: none;
+}
+.editable-list-container.edit-mode .items-box ul.items-list li.item .close{
+  display: inline-block;
+}
+
+.editable-list-container .actions-panel{
+  position: absolute;
+  right: 5px;
+  bottom: -30px;
+  padding: 2px 5px 5px 5px;
+  background: #f5f5f5;
+  border: 1px solid #e3e3e3;
+  border-top: none;
+  border-radius: 0 0 4px 4px;
+
+  -webkit-transition: all 0.3s;
+  -o-transition: all 0.3s;
+  transition: all 0.3s;
+
+  -ms-transform-origin: 0% 0%; /* IE 9 */
+  -webkit-transform-origin: 0% 0%; /* Chrome, Safari, Opera */
+  transform-origin: 0% 0%;
+
+  -webkit-transform: rotateX(0deg);
+  -ms-transform: rotateX(0deg);
+  -o-transform: rotateX(0deg);
+  transform: rotateX(0deg); 
+}
+.editable-list-container .actions-panel.ng-hide{
+  -webkit-transform: rotateX(90deg);
+  -ms-transform: rotateX(90deg);
+  -o-transform: rotateX(90deg);
+  transform: rotateX(90deg);
+}
+
+.editable-list-container.edit-mode .items-box ul.items-list li.item.add-item-input.ng-hidden{
+  display: none !important;
+}
+.editable-list-container.edit-mode .items-box ul.items-list li.item.add-item-input{
+  display: inline-block!important;
+  outline: none;
+  max-width: 200px;
+  text-overflow: ellipsis;
+  white-space: nowrap;
+  position: relative;
+  padding-right: 8px;
+  -webkit-transition: none;
+  -o-transition: none;
+  transition: none;
+}
+
+.add-item-input span{
+  display: block;
+  outline: none;
+  min-width: 30px;
+  position: relative;
+  cursor: pointer;
+}
+.add-item-input span:focus{
+  cursor: default;
+}
+.editable-list-container .items-box ul.items-list li.item.add-item{
+  color: #ddd;
+}
+.add-item-input span:empty:before{
+  content: 'New';
+  position: absolute;
+  left: 0;
+  color: #ddd;
+}
+.add-item-input span:focus:before{
+  display: none;
+}
+.typeahead-box{
+  position: absolute;
+  left: 0;
+  margin-top: 5px;
+  background: white;
+  border: 1px solid #ebebeb;
+  z-index: 1000;
+  min-width: 65px;
+}
+.typeahead-box ul{
+  list-style-type: none;
+  margin: 0;
+  padding: 0;
+}
+.typeahead-box ul li{
+  padding: 3px 5px;
+  display: block;
+  cursor: pointer;
+}
+
+.typeahead-box ul li.selected, .typeahead-box ul li:hover{
+  background: #eee;
+}
+
+.editable-list-container.disabled .pencil-box{
+  display: none;
+}
+.editable-list-container .pencil-box{
+  position: absolute;
+  right: 5px;
+  top: 5px;
+  opacity: 0;
+  -webkit-transition: all 0.3s;
+  -o-transition: all 0.3s;
+  transition: all 0.3s;
+}
+.editable-list-container:hover .pencil-box{
+  opacity: 1;
+}
+.editable-list-container.edit-mode:hover .pencil-box{
+  opacity: 0;
+}
+
+/*
+  ------ END editable-list DIRECTIVE SECTION ------ -
+*/
+
+
  .instances-table{
   table-layout: fixed;
  }
@@ -229,17 +405,43 @@
 
 .left-navbar .panel{
   border-radius: 0;
+  font-size: 15px;
+}
+.left-navbar .panel-heading {
+  padding: 8px 15px;
+  font-weight: bold;
+}
+.left-navbar .panel-body {
+  padding: 5px 15px;
+}
+.left-navbar .panel-body #cluster-name input{
+  font-size: 17px;
+}
+.left-navbar .panel-body #cluster-name form{
+  margin-top: 4px;
+  margin-bottom: -10px;
+}
+.left-navbar .panel-body h4 .glyphicon{
+  font-size: 14px;
+}
+.left-navbar .panel-body #LDAP-button {
+  padding: 5px;
+}
+.left-navbar .panel-body hr{
+  margin-top: 5px;
+  margin-bottom: 5px;
 }
 .left-navbar .panel-body li{
   margin: 0 -15px;
 }
 .left-navbar .panel-body li a{
   border-radius: 0;
-  padding-left: 30px;
+  padding-left: 33px;
+  padding-top: 8px;
+  padding-bottom: 8px;
 }
 .left-navbar .panel-body li.active a{
   background: #666;
-  
 }
 
 .search-container{
@@ -721,6 +923,12 @@ input[type="submit"].btn.btn-mini {
   *padding-bottom: 1px;
 }
 
+button.btn.btn-xs{
+  padding: 1px 5px;
+  font-size: 12px;
+  line-height: 1.5;
+  border-radius: 3px;
+}
 .alert-info {
   background-color: #E6F1F6;
   border-color: #D2D9DD;

+ 2 - 27
ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html

@@ -93,9 +93,6 @@
 <div class="panel panel-default views-permissions-panel" style="">
   <div class="panel-heading clearfix">
     <h3 class="panel-title pull-left">Permissions</h3>
-    <div class="pull-right" >
-      <a ng-hide="isPermissionsEmpty" href class="permissions-edit-toggle" ng-click="editPermissionDisabled = !editPermissionDisabled" ng-show="editPermissionDisabled"> <span class="glyphicon glyphicon-pencil"></span> Edit</a>
-    </div>
   </div>
   <div class="panel-body">
     
@@ -114,36 +111,14 @@
             <label class="">{{permission.PermissionInfo.permission_name}}</label>
           </td>
           <td>
-            <div class="" ng-switch="editPermissionDisabled">
-              <textarea name="" id="" cols="30" rows="4" class="form-control permission-textarea-user" ng-model="permissionsEdit[permission.PermissionInfo.permission_name].USER" ng-switch-when="false"></textarea>
-              <div class="well" ng-switch-when="true">
-                <span ng-repeat="user in permission.USER | orderBy:identity">
-                  <link-to route="users.show" id="{{user}}">{{user}}</link-to>
-                  {{$last ? '' :', '}}
-                </span>
-              </div>
-            </div>
+            <editable-list items-source="permissionsEdit[permission.PermissionInfo.permission_name].USER" editable="true" resource-type="User"></editable-list>
           </td>
           <td>
-            <div class="" ng-switch="editPermissionDisabled">
-              <textarea name="" id="" cols="30" rows="4" class="form-control permission-textarea-group" ng-model="permissionsEdit[permission.PermissionInfo.permission_name].GROUP" ng-switch-when="false"></textarea>
-              <div class="well" ng-switch-when="true">
-                <span ng-repeat="group in permission.GROUP | orderBy:identity">
-                  <link-to route="groups.edit" id="{{group}}" >{{group}}</link-to>
-                  {{$last ? '' :', '}}
-                </span>
-              </div>
-            </div>
+            <editable-list items-source="permissionsEdit[permission.PermissionInfo.permission_name].GROUP" editable="true" resource-type="Group" ></editable-list>
           </td>
         </tr>
       </tbody>
     </table>
-    <div class="form-group" ng-hide="editPermissionDisabled">
-      <div class="col-sm-offset-2 col-sm-10">
-        <button class="btn btn-primary pull-right left-margin permissions-save" ng-click="savePermissions()">Save</button>
-        <button class="btn btn-default pull-right permissions-cancel" ng-click="cancelPermissions()">Cancel</button>
-      </div>
-    </div>
     <div ng-show="isPermissionsEmpty">
       <div class="alert alert-info">There are no permissions defined for this view.</div>
     </div>

+ 5 - 26
ambari-admin/src/main/resources/ui/admin-web/app/views/clusters/manageAccess.html

@@ -21,17 +21,14 @@
     <ol class="breadcrumb pull-left">
       <li class="active">{{clusterName}} Permissions</li>
     </ol>
-    <div class="pull-right top-margin-4">
-      <a href class="btn btn-primary"  ng-hide="isEditMode" ng-click="toggleEditMode()"><span class="glyphicon glyphicon-pencil"></span> Edit</a>
-    </div>
   </div>
   <hr>
   <table class="table">
     <thead>
       <tr>
-        <th class="col-sm-2"><label>Permission</label></th>
-        <th><label>Grant permission to these users</label></th>
-        <th><label>Grant permission to these groups</label></th>
+        <th class="col-sm-2" width="20%"><label>Permission</label></th>
+        <th class="col-sm-5" width="40%"><label>Grant permission to these users</label></th>
+        <th class="col-sm-5" width="40%"><label>Grant permission to these groups</label></th>
       </tr>
     </thead>
     <tbody>
@@ -39,33 +36,15 @@
         <td><label class="">{{permission.PermissionInfo.permission_name}}</label></td>
         <td>
           <div class="" ng-switch="isEditMode">
-            <textarea ng-switch-when="true" name="" id="" cols="30" rows="4" class="form-control permission-user-input" ng-model="permissionsEdit[permission.PermissionInfo.permission_name].USER"></textarea>
-            <div class="well" ng-switch-default>
-              <span ng-repeat="user in permission.USER | orderBy:identity">
-                <link-to route="users.show" id="{{user}}" >{{user}}</link-to>
-                {{$last ? '' :', '}}
-              </span>
-            </div>
+            <editable-list items-source="permissionsEdit[permission.PermissionInfo.permission_name].USER" resource-type="User" editable="true"></editable-list>
           </div>
         </td>
         <td>
           <div class="" ng-switch="isEditMode">
-            <textarea ng-switch-when="true" name="" id="" cols="30" rows="4" class="form-control permission-group-input" ng-model="permissionsEdit[permission.PermissionInfo.permission_name].GROUP | orderBy:identity"></textarea>
-            <div class="well" ng-switch-default>
-              <span ng-repeat="group in permission.GROUP">
-                <link-to route="groups.edit" id="{{group}}">{{group}}</link-to>
-                {{$last ? '' :', '}}
-              </span>
-            </div>
+            <editable-list items-source="permissionsEdit[permission.PermissionInfo.permission_name].GROUP" resource-type="Group" editable="true"></editable-list>
           </div>
         </td>
       </tr>
     </tbody>
   </table>
-  <div class="form-group" ng-show="isEditMode">
-    <div class="col-sm-offset-2 col-sm-10">
-      <button class="btn btn-primary pull-right permission-save left-margin" ng-click="save()">Save</button>
-      <button class="btn btn-default pull-right permissions-cancel" ng-click="cancel()">Cancel</button>
-    </div>
-  </div>
 </div>

+ 44 - 0
ambari-admin/src/main/resources/ui/admin-web/app/views/directives/editableList.html

@@ -0,0 +1,44 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<div class="editable-list-container well" ng-class="{'edit-mode' : editMode, 'disabled' : !editable}" ng-click="enableEditMode()">
+  <div class="items-box">
+    <ul class="items-list">
+      <li class="item" ng-repeat="item in items | orderBy:identity"><span><a href>{{item}}</a><button ng-click="removeFromItems(item)" type="button" class="close"><span aria-hidden="true">&times;</span><span class="sr-only">Close</span></button></span></li><li class="item add-item-input" ng-show="editMode">
+        <span contenteditable></span>
+        <div class="typeahead-box" ng-show="typeahead.length != 0">
+          <ul>
+            <li ng-repeat="item in typeahead" ng-click="addItem(item)" ng-class="{'selected' : $index == selectedTypeahed}">{{item}}</li>
+          </ul>
+        </div>
+      </li>
+      <li class="item add-item" ng-show="!editMode && !items.length">Add {{resourceType}}</li>
+    </ul>
+  </div>
+  <div class="actions-panel" ng-show="editMode">
+    <button class="btn btn-default btn-xs cancel" ng-click="cancel($event)">
+      <span class="glyphicon glyphicon-remove cancel"></span>
+    </button>
+    <button class="btn btn-primary btn-xs" ng-click="save($event)">
+      <span class="glyphicon glyphicon-ok"></span>
+    </button>
+  </div>
+  <div class="pencil-box">
+    <span class="glyphicon glyphicon-pencil"></span>
+  </div>
+</div>

+ 1 - 22
ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html

@@ -40,28 +40,7 @@
   <div class="form-group">
     <label for="members" class="col-sm-2 control-label">{{group.ldap_group ? 'LDAP Members' : 'Local Members'}}</label>
     <div class="col-sm-10">
-      <div class="row" ng-hide="isMembersEditing">
-        <div class="col-sm-10">
-          <div class="well users">
-            <span ng-repeat="member in groupMembers" >
-              <link-to route='users.show' id="{{member}}">
-                {{member}}
-              </link-to>
-              {{$last ? '' : ', '}}
-            </span>
-          </div>
-        </div>
-        <div class="col-sm-2">
-          <a href ng-click="enableMembersEditing()" ng-hide="user.ldap_group"><span class="glyphicon glyphicon-pencil"></span> Edit</a>
-        </div>
-      </div>
-      <div class="row" ng-show="isMembersEditing">
-        <div class="col-sm-12">
-          <textarea name="groups" id="" cols="30" rows="5" class="form-control bottom-margin usergroups" ng-model="group.editingUsers"></textarea>
-          <a href class="btn btn-primary pull-right left-margin updategroups" ng-click="updateMembers()">Save</a>
-          <button class="btn btn-default pull-right cancel-groups-update" ng-click="cancelUpdate()">Cancel</button>
-        </div>
-      </div>
+      <editable-list items-source="group.editingUsers" resource-type="User" editable="!group.ldap_group"></editable-list>
     </div>
   </div>
 

+ 4 - 4
ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html

@@ -20,10 +20,10 @@
     <div class="panel-heading"><span class="glyphicon glyphicon-cloud"></span> Clusters</div>
     <div class="panel-body">
       <div ng-show="cluster">
-        <div ng-switch on="editCluster.editingName">
-          <h5 ng-switch-when="false">{{cluster.Clusters.cluster_name}}
+        <div id="cluster-name"  ng-switch on="editCluster.editingName">
+          <h4 ng-switch-when="false">{{cluster.Clusters.cluster_name}}
             <i ng-click="toggleEditName()" class="glyphicon glyphicon-edit pull-right edit-cluster-name" tooltip="Rename Cluster"></i>
-          </h5>
+          </h4>
 
           <form ng-keyup="toggleEditName($event)" tabindex="1" name="editClusterNameForm" class="editClusterNameForm" ng-switch-when="true"
                 ng-submit="editCluster.name !== cluster.Clusters.cluster_name && editClusterNameForm.newClusterName.$valid && confirmClusterNameChange()">
@@ -89,7 +89,7 @@
         <li ng-class="{active: isActive('groups.list')}"><link-to route="groups.list" class="groupslist-link">Groups</link-to></li>
       </ul>
       <hr>
-      <div ng-switch="isLDAPConfigured">
+      <div id="LDAP-button" ng-switch="isLDAPConfigured">
         <a ng-switch-when="true" href class="btn btn-primary btn-block syncldapbtn" ng-click="syncLDAP()">
           <span class="glyphicon glyphicon-transfer pulldown2"></span> Sync LDAP
         </a>

+ 1 - 21
ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html

@@ -62,27 +62,7 @@
     <div class="form-group">
       <label for="groups" class="col-sm-2 control-label">{{user.ldap_user ? 'LDAP Group Membership' : 'Local Group Membership'}}</label>
       <div class="col-sm-10">
-        <div class="row" ng-hide="isGroupEditing">
-          <div class="col-sm-10">
-            <div class="well">
-              <span ng-repeat="group in user.groups">
-                <a href="#/groups/{{group}}/edit" >{{group}}</a>
-                {{$last ? '' : ', '}}
-              </span>
-            </div>
-          </div>
-          <div class="col-sm-2">
-            <a href ng-click="enableGroupEditing()" ng-hide="user.ldap_user"><span class="glyphicon glyphicon-pencil"></span> Edit</a>
-          </div>
-        </div>
-        <div class="row" ng-show="isGroupEditing">
-          <div class="col-sm-12">
-            <textarea name="groups" id="" cols="30" rows="5" class="form-control bottom-margin usergroups" ng-model="editingGroupsList"></textarea>
-            <a href class="btn btn-primary pull-right left-margin updategroups" ng-click="updateGroups()">Save</a>
-            <button class="btn btn-default pull-right cancel-groups-update" ng-click="cancelUpdate()">Cancel</button>
-          </div>
-            
-        </div>
+        <editable-list items-source="editingGroupsList" resource-type="Group" editable="!user.ldap_user"></editable-list>
       </div>
         
     </div>

+ 2 - 1
ambari-admin/src/main/resources/ui/admin-web/bower.json

@@ -7,7 +7,8 @@
     "angular-route": "~1.2.18",
     "angular-bootstrap": "~0.11.0",
     "restangular": "~1.4.0",
-    "angular-bootstrap-toggle-switch": "~0.5.1"
+    "angular-bootstrap-toggle-switch": "~0.5.1",
+    "angular-animate": "~1.2.23"
   },
   "devDependencies": {}
 }

+ 16 - 7
ambari-common/src/main/python/resource_management/libraries/script/script.py

@@ -237,6 +237,20 @@ class Script(object):
     """
     self.fail_with_error('configure method isn\'t implemented')
 
+  def generate_configs_get_template_file_content(self, filename, dicts):
+    import params
+    content = ''
+    for dict in dicts.split(','):
+      if dict.strip() in params.config['configurations']:
+        content += params.config['configurations'][dict.strip()]['content']
+
+    return content
+
+  def generate_configs_get_xml_file_content(self, filename, dict):
+    import params
+    return {'configurations':params.config['configurations'][dict],
+            'configuration_attributes':params.config['configuration_attributes'][dict]}
+
   def generate_configs(self, env):
     """
     Generates config files and stores them as an archive in tmp_dir
@@ -254,17 +268,12 @@ class Script(object):
       for filename, dict in file_dict.iteritems():
         XmlConfig(filename,
                   conf_dir=conf_tmp_dir,
-                  configurations=params.config['configurations'][dict],
-                  configuration_attributes=params.config['configuration_attributes'][dict],
+                  **self.generate_configs_get_xml_file_content(filename, dict)
         )
     for file_dict in env_configs_list:
       for filename,dicts in file_dict.iteritems():
-        content = ''
-        for dict in dicts.split(','):
-          if dict.strip() in params.config['configurations']:
-            content += params.config['configurations'][dict.strip()]['content']
         File(os.path.join(conf_tmp_dir, filename),
-             content=InlineTemplate(content))
+             content=InlineTemplate(self.generate_configs_get_template_file_content(filename, dicts)))
     with closing(tarfile.open(output_filename, "w:gz")) as tar:
       tar.add(conf_tmp_dir, arcname=os.path.basename("."))
       tar.close()

+ 3 - 0
ambari-server/conf/unix/ambari.properties

@@ -52,3 +52,6 @@ agent.threadpool.size.max=25
 
 # linux open-file limit
 ulimit.open.files=10000
+
+# Server HTTP settings
+server.http.session.inactive_timeout=60

+ 0 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/AmbariPersistFilter.java

@@ -19,7 +19,6 @@ package org.apache.ambari.server.api;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
-import com.google.inject.persist.PersistService;
 import com.google.inject.persist.UnitOfWork;
 
 import javax.servlet.*;

+ 3 - 3
ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java

@@ -59,7 +59,7 @@ public class StackAdvisorHelper {
    * Returns validation (component-layout or configurations) result for the
    * request.
    * 
-   * @param validationRequest the validation request
+   * @param request the validation request
    * @return {@link ValidationResponse} instance
    * @throws StackAdvisorException in case of stack advisor script errors
    */
@@ -83,7 +83,7 @@ public class StackAdvisorHelper {
       command = new GetConfigurationValidationCommand(recommendationsDir, stackAdvisorScript,
           requestId, saRunner, metaInfo);
     } else {
-      throw new StackAdvisorException(String.format("Unsupported request type, type=%s",
+      throw new StackAdvisorRequestException(String.format("Unsupported request type, type=%s",
           requestType));
     }
 
@@ -118,7 +118,7 @@ public class StackAdvisorHelper {
       command = new GetConfigurationRecommnedationCommand(recommendationsDir, stackAdvisorScript,
           requestId, saRunner, metaInfo);
     } else {
-      throw new StackAdvisorException(String.format("Unsupported request type, type=%s",
+      throw new StackAdvisorRequestException(String.format("Unsupported request type, type=%s",
           requestType));
     }
 

+ 27 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequestException.java

@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.stackadvisor;
+
+@SuppressWarnings("serial")
+public class StackAdvisorRequestException extends StackAdvisorException {
+
+  public StackAdvisorRequestException(String message) {
+    super(message);
+  }
+}

+ 25 - 9
ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java

@@ -41,10 +41,9 @@ public class StackAdvisorRunner {
    * @param script stack advisor script
    * @param saCommandType {@link StackAdvisorCommandType} to run.
    * @param actionDirectory directory for the action
-   * @return {@code true} if script completed successfully, {@code false}
-   *         otherwise.
    */
-  public boolean runScript(String script, StackAdvisorCommandType saCommandType, File actionDirectory) {
+  public void runScript(String script, StackAdvisorCommandType saCommandType, File actionDirectory)
+      throws StackAdvisorException {
     LOG.info(String.format("Script=%s, actionDirectory=%s, command=%s", script, actionDirectory,
         saCommandType));
 
@@ -62,23 +61,40 @@ public class StackAdvisorRunner {
         LOG.info("Stack-advisor output={}, error={}", outputFile, errorFile);
 
         int exitCode = process.waitFor();
+        String outMessage;
+        String errMessage = null;
         try {
-          String outMessage = FileUtils.readFileToString(new File(outputFile));
-          String errMessage = FileUtils.readFileToString(new File(errorFile));
+          outMessage = FileUtils.readFileToString(new File(outputFile)).trim();
+          errMessage = FileUtils.readFileToString(new File(errorFile)).trim();
           LOG.info("Stack advisor output files");
           LOG.info("    advisor script stdout: {}", outMessage);
           LOG.info("    advisor script stderr: {}", errMessage);
         } catch (IOException io) {
           LOG.error("Error in reading script log files", io);
         }
-
-        return exitCode == 0;
+        if (exitCode > 0) {
+          String errorMessage;
+          if (errMessage != null) {
+            errorMessage = errMessage.substring(errMessage.lastIndexOf("\n"));
+          } else {
+            errorMessage = "Error occurred during stack advisor execution";
+          }
+          switch (exitCode) {
+            case 1:
+              throw new StackAdvisorRequestException(errorMessage);
+            case 2:
+              throw new StackAdvisorException(errorMessage);
+          }
+        }
       } finally {
         process.destroy();
       }
+    } catch (StackAdvisorException ex) {
+      throw ex;
     } catch (Exception ioe) {
-      LOG.error("Error executing stack advisor", ioe);
-      return false;
+      String message = "Error executing stack advisor: ";
+      LOG.error(message, ioe);
+      throw new StackAdvisorException(message + ioe.getMessage());
     }
   }
 

+ 5 - 9
ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommand.java

@@ -224,21 +224,17 @@ public abstract class StackAdvisorCommand<T extends StackAdvisorResponse> extend
       FileUtils.writeStringToFile(new File(requestDirectory, "services.json"),
           adjusted.servicesJSON);
 
-      boolean success = saRunner.runScript(stackAdvisorScript, getCommandType(), requestDirectory);
-      if (!success) {
-        String message = "Stack advisor script finished with errors";
-        LOG.warn(message);
-        throw new StackAdvisorException(message);
-      }
-
+      saRunner.runScript(stackAdvisorScript, getCommandType(), requestDirectory);
       String result = FileUtils.readFileToString(new File(requestDirectory, getResultFileName()));
 
       T response = this.mapper.readValue(result, this.type);
       return updateResponse(request, setRequestId(response));
+    } catch (StackAdvisorException ex) {
+      throw ex;
     } catch (Exception e) {
-      String message = "Error occured during stack advisor command invocation";
+      String message = "Error occured during stack advisor command invocation: ";
       LOG.warn(message, e);
-      throw new StackAdvisorException(message, e);
+      throw new StackAdvisorException(message + e.getMessage());
     }
   }
 

+ 46 - 12
ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java

@@ -260,12 +260,11 @@ public class Configuration {
   private static final String RESOURCES_DIR_DEFAULT =
       "/var/lib/ambari-server/resources/";
   private static final String ANONYMOUS_AUDIT_NAME_KEY = "anonymous.audit.name";
-  private static final String CLIENT_SECURITY_DEFAULT = "local";
+
   private static final int CLIENT_API_PORT_DEFAULT = 8080;
   private static final int CLIENT_API_SSL_PORT_DEFAULT = 8443;
-  private static final String USER_ROLE_NAME_DEFAULT = "user";
-  private static final String ADMIN_ROLE_NAME_DEFAULT = "admin";
   private static final String LDAP_BIND_ANONYMOUSLY_DEFAULT = "true";
+
   //TODO For embedded server only - should be removed later
   private static final String LDAP_PRIMARY_URL_DEFAULT = "localhost:33389";
   private static final String LDAP_BASE_DN_DEFAULT = "dc=ambari,dc=apache,dc=org";
@@ -309,6 +308,8 @@ public class Configuration {
   private static final String VIEW_EXTRACTION_THREADPOOL_TIMEOUT_KEY = "view.extraction.threadpool.timeout";
   private static final long VIEW_EXTRACTION_THREADPOOL_TIMEOUT_DEFAULT = 100000L;
 
+  private static final String SERVER_HTTP_SESSION_INACTIVE_TIMEOUT = "server.http.session.inactive_timeout";
+
   private static final Logger LOG = LoggerFactory.getLogger(
       Configuration.class);
   private Properties properties;
@@ -401,7 +402,7 @@ public class Configuration {
     }
     configsMap.put(SRVR_CRT_PASS_KEY, password);
 
-    if (this.getApiSSLAuthentication()) {
+    if (getApiSSLAuthentication()) {
       LOG.info("API SSL Authentication is turned on.");
       File httpsPassFile = new File(configsMap.get(CLIENT_API_SSL_KSTR_DIR_NAME_KEY)
         + File.separator + configsMap.get(CLIENT_API_SSL_CRT_PASS_FILE_NAME_KEY));
@@ -464,14 +465,14 @@ public class Configuration {
   private synchronized void loadCredentialProvider() {
     if (!credentialProviderInitialized) {
       try {
-        this.credentialProvider = new CredentialProvider(null,
+        credentialProvider = new CredentialProvider(null,
           getMasterKeyLocation(), isMasterKeyPersisted());
       } catch (Exception e) {
         LOG.info("Credential provider creation failed. Reason: " + e.getMessage());
         if (LOG.isDebugEnabled()) {
           e.printStackTrace();
         }
-        this.credentialProvider = null;
+        credentialProvider = null;
       }
       credentialProviderInitialized = true;
     }
@@ -487,8 +488,9 @@ public class Configuration {
     //Get property file stream from classpath
     InputStream inputStream = Configuration.class.getClassLoader().getResourceAsStream(CONFIG_FILE);
 
-    if (inputStream == null)
+    if (inputStream == null) {
       throw new RuntimeException(CONFIG_FILE + " not found in classpath");
+    }
 
     // load the properties
     try {
@@ -531,8 +533,9 @@ public class Configuration {
   public String getBootSetupAgentPassword() {
     String pass = configsMap.get(PASSPHRASE_KEY);
 
-    if (null != pass)
+    if (null != pass) {
       return pass;
+    }
 
     // fallback
     return properties.getProperty(BOOTSTRAP_SETUP_AGENT_PASSWORD, "password");
@@ -576,6 +579,20 @@ public class Configuration {
     properties.setProperty(CLIENT_SECURITY_KEY, type.toString());
   }
 
+  public void setLdap(String host, String userClass, String userNameAttr, String groupClass, String groupName, String groupMember,
+      String baseDN, boolean anon, String managerDN, String managerPass) {
+    properties.setProperty(LDAP_PRIMARY_URL_KEY, host);
+    properties.setProperty(LDAP_USER_OBJECT_CLASS_KEY, userClass);
+    properties.setProperty(LDAP_USERNAME_ATTRIBUTE_KEY, userNameAttr);
+    properties.setProperty(LDAP_GROUP_OBJECT_CLASS_KEY, groupClass);
+    properties.setProperty(LDAP_GROUP_NAMING_ATTR_KEY, groupName);
+    properties.setProperty(LDAP_GROUP_MEMEBERSHIP_ATTR_KEY, groupMember);
+    properties.setProperty(LDAP_BASE_DN_KEY, baseDN);
+    properties.setProperty(LDAP_BIND_ANONYMOUSLY_KEY, String.valueOf(anon));
+    properties.setProperty(LDAP_MANAGER_DN_KEY, managerDN);
+    properties.setProperty(LDAP_MANAGER_PASSWORD_KEY, managerPass);
+  }
+
   public String getWebAppDir() {
     LOG.info("Web App DIR test " + properties.getProperty(WEBAPP_DIR));
     return properties.getProperty(WEBAPP_DIR, "web");
@@ -671,8 +688,9 @@ public class Configuration {
 
   public String getLocalDatabaseUrl() {
     String dbName = properties.getProperty(SERVER_DB_NAME_KEY);
-    if(dbName == null || dbName.isEmpty())
+    if(dbName == null || dbName.isEmpty()) {
       throw new RuntimeException("Server DB Name is not configured!");
+    }
 
     return JDBC_LOCAL_URL + dbName;
   }
@@ -688,10 +706,11 @@ public class Configuration {
       dbpasswd = readPasswordFromStore(passwdProp);
     }
 
-    if (dbpasswd != null)
+    if (dbpasswd != null) {
       return dbpasswd;
-    else
+    } else {
       return readPasswordFromFile(passwdProp, SERVER_JDBC_USER_PASSWD_DEFAULT);
+    }
   }
 
   public String getRcaDatabaseDriver() {
@@ -710,8 +729,9 @@ public class Configuration {
     String passwdProp = properties.getProperty(SERVER_JDBC_RCA_USER_PASSWD_KEY);
     if (passwdProp != null) {
       String dbpasswd = readPasswordFromStore(passwdProp);
-      if (dbpasswd != null)
+      if (dbpasswd != null) {
         return dbpasswd;
+      }
     }
     return readPasswordFromFile(passwdProp, SERVER_JDBC_RCA_USER_PASSWD_DEFAULT);
   }
@@ -1067,4 +1087,18 @@ public class Configuration {
     return Long.parseLong(properties.getProperty(
         VIEW_EXTRACTION_THREADPOOL_TIMEOUT_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_TIMEOUT_DEFAULT)));
   }
+
+  /**
+   * Gets the inactivity timeout value, in seconds, for sessions created in
+   * Jetty by Spring Security. Without this timeout value, each request to the
+   * REST APIs will create new sessions that are never reaped since their
+   * default time is -1.
+   *
+   * @return the time value or {@code 60} seconds for default.
+   */
+  public int getHttpSessionInactiveTimeout() {
+    return Integer.parseInt(properties.getProperty(
+        SERVER_HTTP_SESSION_INACTIVE_TIMEOUT,
+        "60"));
+  }
 }

+ 38 - 5
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java

@@ -17,6 +17,12 @@
  */
 package org.apache.ambari.server.controller;
 
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+
 import org.apache.ambari.server.orm.entities.ViewEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 import org.apache.ambari.server.view.ViewContextImpl;
@@ -25,12 +31,12 @@ import org.apache.ambari.server.view.ViewRegistry;
 import org.apache.ambari.view.SystemException;
 import org.apache.ambari.view.ViewContext;
 import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.SessionManager;
+import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.webapp.WebAppContext;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.HashMap;
-import java.util.Map;
+import org.springframework.web.context.WebApplicationContext;
+import org.springframework.web.context.support.GenericWebApplicationContext;
+import org.springframework.web.filter.DelegatingFilterProxy;
 
 /**
  * An Ambari specific extension of the FailsafeHandlerList that allows for the addition
@@ -45,6 +51,15 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
   @Inject
   ViewRegistry viewRegistry;
 
+  /**
+   * Session manager.
+   */
+  @Inject
+  SessionManager sessionManager;
+
+  @Inject
+  DelegatingFilterProxy springSecurityFilter;
+
   /**
    * The Handler factory.
    */
@@ -55,6 +70,10 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
    */
   private final Map<ViewInstanceEntity, Handler> handlerMap = new HashMap<ViewInstanceEntity, Handler>();
 
+  /**
+   * Spring web app context.
+   */
+  private GenericWebApplicationContext springWebAppContext;
 
   // ----- Constructors ------------------------------------------------------
 
@@ -72,6 +91,10 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
         context.setClassLoader(viewInstanceDefinition.getViewEntity().getClassLoader());
         context.setAttribute(ViewContext.CONTEXT_ATTRIBUTE, new ViewContextImpl(viewInstanceDefinition, viewRegistry));
 
+        context.getSessionHandler().setSessionManager(sessionManager);
+        context.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, springWebAppContext);
+        context.addFilter(new FilterHolder(springSecurityFilter), "/*", 1);
+
         return context;
       }
     };
@@ -87,6 +110,16 @@ public class AmbariHandlerList extends FailsafeHandlerList implements ViewInstan
     this.handlerFactory = handlerFactory;
   }
 
+  /**
+   * Sets the spring web app context.
+   *
+   * @param springWebAppContext the spring web app context
+   */
+  public void setSpringWebAppContext(
+      GenericWebApplicationContext springWebAppContext) {
+    this.springWebAppContext = springWebAppContext;
+  }
+
 
   // ----- ViewInstanceHandler -----------------------------------------------
 

+ 5 - 11
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementController.java

@@ -24,6 +24,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.internal.RequestStageContainer;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
+import org.apache.ambari.server.security.ldap.LdapSyncDto;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigHelper;
@@ -36,6 +37,7 @@ import org.apache.ambari.server.state.ServiceFactory;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
+
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -689,20 +691,12 @@ public interface AmbariManagementController {
   public boolean checkLdapConfigured();
 
   /**
-   * Retrieves users from external LDAP.
-   *
-   * @return key-value pairs UserName-Synced
-   * @throws AmbariException if LDAP is configured incorrectly
-   */
-  public Map<String, Boolean> getLdapUsersSyncInfo() throws AmbariException;
-
-  /**
-   * Retrieves groups from external LDAP.
+   * Retrieves groups and users from external LDAP.
    *
-   * @return key-value pairs GroupName-Synced
+   * @return ldap sync DTO
    * @throws AmbariException if LDAP is configured incorrectly
    */
-  public Map<String, Boolean> getLdapGroupsSyncInfo() throws AmbariException;
+  public LdapSyncDto getLdapSyncInfo() throws AmbariException;
 
   /**
    * Synchronizes local users and groups with given data.

+ 13 - 21
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java

@@ -91,11 +91,13 @@ import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
-import org.apache.ambari.server.security.authorization.AmbariLdapDataPopulator;
 import org.apache.ambari.server.security.authorization.AuthorizationHelper;
 import org.apache.ambari.server.security.authorization.Group;
 import org.apache.ambari.server.security.authorization.User;
 import org.apache.ambari.server.security.authorization.Users;
+import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator;
+import org.apache.ambari.server.security.ldap.LdapBatchDto;
+import org.apache.ambari.server.security.ldap.LdapSyncDto;
 import org.apache.ambari.server.stageplanner.RoleGraph;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -143,7 +145,6 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
-import com.google.common.collect.Multimaps;
 import com.google.gson.Gson;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
@@ -724,11 +725,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         throw new AmbariException("User already exists.");
       }
 
-      users.createUser(request.getUsername(), request.getPassword(), request.isActive(), request.isAdmin());
-
-      if (null != request.isActive() && null != user) {
-        users.setUserActive(user, request.isActive());
-      }
+      users.createUser(request.getUsername(), request.getPassword(), request.isActive(), request.isAdmin(), false);
     }
   }
 
@@ -1805,7 +1802,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
             RoleCommand roleCommand;
             State oldSchState = scHost.getState();
             ServiceComponentHostEvent event;
-            
+
             switch (newState) {
               case INSTALLED:
                 if (oldSchState == State.INIT
@@ -1960,7 +1957,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
                     break;
                 }
               }
-              
+
               if (null == requestParameters) {
                 requestParameters = new HashMap<String, String>();
               }
@@ -1972,7 +1969,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
           }
         }
       }
-      
+
       for (String serviceName : smokeTestServices) { // Creates smoke test commands
         Service s = cluster.getService(serviceName);
         // find service component host
@@ -3313,7 +3310,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     }
     return response;
   }
-  
+
   @Override
   public Set<StackConfigurationResponse> getStackLevelConfigurations(
       Set<StackLevelConfigurationRequest> requests) throws AmbariException {
@@ -3322,7 +3319,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
       String stackName    = request.getStackName();
       String stackVersion = request.getStackVersion();
-      
+
       Set<StackConfigurationResponse> stackConfigurations = getStackLevelConfigurations(request);
 
       for (StackConfigurationResponse stackConfigurationResponse : stackConfigurations) {
@@ -3673,19 +3670,14 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
   }
 
   @Override
-  public Map<String, Boolean> getLdapUsersSyncInfo() throws AmbariException {
-    return ldapDataPopulator.getLdapUsersSyncInfo();
-  }
-
-  @Override
-  public Map<String, Boolean> getLdapGroupsSyncInfo() throws AmbariException {
-    return ldapDataPopulator.getLdapGroupsSyncInfo();
+  public LdapSyncDto getLdapSyncInfo() throws AmbariException {
+    return ldapDataPopulator.getLdapSyncInfo();
   }
 
   @Override
   public synchronized void synchronizeLdapUsersAndGroups(Set<String> users,
       Set<String> groups) throws AmbariException {
-    ldapDataPopulator.synchronizeLdapUsersAndGroups(users, groups);
+    final LdapBatchDto batchInfo = ldapDataPopulator.synchronizeLdapUsersAndGroups(users, groups);
+    this.users.processLdapSync(batchInfo);
   }
-
 }

+ 40 - 12
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java

@@ -80,10 +80,10 @@ import org.apache.ambari.server.security.CertificateManager;
 import org.apache.ambari.server.security.SecurityFilter;
 import org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter;
 import org.apache.ambari.server.security.authorization.AmbariLdapAuthenticationProvider;
-import org.apache.ambari.server.security.authorization.AmbariLdapDataPopulator;
 import org.apache.ambari.server.security.authorization.AmbariLocalUserDetailsService;
 import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.security.authorization.internal.AmbariInternalAuthenticationProvider;
+import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator;
 import org.apache.ambari.server.security.unsecured.rest.CertificateDownload;
 import org.apache.ambari.server.security.unsecured.rest.CertificateSign;
 import org.apache.ambari.server.security.unsecured.rest.ConnectionInfo;
@@ -94,6 +94,8 @@ import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.ambari.server.view.ViewRegistry;
 import org.eclipse.jetty.server.Connector;
 import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.SessionIdManager;
+import org.eclipse.jetty.server.SessionManager;
 import org.eclipse.jetty.server.nio.SelectChannelConnector;
 import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
 import org.eclipse.jetty.servlet.DefaultServlet;
@@ -157,6 +159,21 @@ public class AmbariServer {
   @Inject
   AmbariHandlerList handlerList;
 
+  /**
+   * Session manager.
+   */
+  @Inject
+  SessionManager sessionManager;
+
+  /**
+   * Session ID manager.
+   */
+  @Inject
+  SessionIdManager sessionIdManager;
+
+  @Inject
+  DelegatingFilterProxy springSecurityFilter;
+
   public String getServerOsType() {
     return configs.getServerOsType();
   }
@@ -176,6 +193,7 @@ public class AmbariServer {
     performStaticInjection();
     initDB();
     server = new Server();
+    server.setSessionIdManager(sessionIdManager);
     Server serverForAgent = new Server();
 
     checkDBVersion();
@@ -213,9 +231,20 @@ public class AmbariServer {
 
       root.setContextPath(CONTEXT_PATH);
       root.setErrorHandler(injector.getInstance(AmbariErrorHandler.class));
+      root.getSessionHandler().setSessionManager(sessionManager);
+
+      SessionManager jettySessionManager = root.getSessionHandler().getSessionManager();
+
+      // use AMBARISESSIONID instead of JSESSIONID to avoid conflicts with
+      // other services (like HDFS) that run on the same context but a different
+      // port
+      jettySessionManager.setSessionCookie("AMBARISESSIONID");
 
-      //Changing session cookie name to avoid conflicts
-      root.getSessionHandler().getSessionManager().setSessionCookie("AMBARISESSIONID");
+      // each request that does not use AMBARISESSIONID will create a new
+      // HashedSession in Jetty; these MUST be reaped after inactivity in order
+      // to prevent a memory leak
+      int sessionInactivityTimeout = configs.getHttpSessionInactiveTimeout();
+      jettySessionManager.setMaxInactiveInterval(sessionInactivityTimeout);
 
       GenericWebApplicationContext springWebAppContext = new GenericWebApplicationContext();
       springWebAppContext.setServletContext(root.getServletContext());
@@ -226,11 +255,14 @@ public class AmbariServer {
       root.getServletContext().setAttribute(
           WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE,
           springWebAppContext);
+      handlerList.setSpringWebAppContext(springWebAppContext);
 
       certMan.initRootCert();
 
-      ServletContextHandler agentroot = new ServletContextHandler(serverForAgent,
-          "/", ServletContextHandler.SESSIONS );
+      // the agent communication (heartbeats, registration, etc) is stateless
+      // and does not use sessions.
+      ServletContextHandler agentroot = new ServletContextHandler(
+          serverForAgent, "/", ServletContextHandler.NO_SESSIONS);
 
       ServletHolder rootServlet = root.addServlet(DefaultServlet.class, "/");
       rootServlet.setInitParameter("dirAllowed", "false");
@@ -240,17 +272,13 @@ public class AmbariServer {
       rootServlet = agentroot.addServlet(DefaultServlet.class, "/");
       rootServlet.setInitOrder(1);
 
-      //Spring Security Filter initialization
-      DelegatingFilterProxy springSecurityFilter = new DelegatingFilterProxy();
-      springSecurityFilter.setTargetBeanName("springSecurityFilterChain");
-
       //session-per-request strategy for api and agents
       root.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/api/*", 1);
       root.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/proxy/*", 1);
       root.addFilter(new FilterHolder(new MethodOverrideFilter()), "/api/*", 1);
       root.addFilter(new FilterHolder(new MethodOverrideFilter()), "/proxy/*", 1);
-      agentroot.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/agent/*", 1);
 
+      agentroot.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/agent/*", 1);
       agentroot.addFilter(SecurityFilter.class, "/*", 1);
 
       if (configs.getApiAuthentication()) {
@@ -467,8 +495,8 @@ public class AmbariServer {
       LOG.info("Database init needed - creating default data");
       Users users = injector.getInstance(Users.class);
 
-      users.createUser("admin", "admin", true, true);
-      users.createUser("user", "user", true, false);
+      users.createUser("admin", "admin");
+      users.createUser("user", "user");
 
       MetainfoEntity schemaVersion = new MetainfoEntity();
       schemaVersion.setMetainfoName(Configuration.SERVER_VERSION_KEY);

+ 43 - 26
ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java

@@ -18,14 +18,24 @@
 
 package org.apache.ambari.server.controller;
 
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.inject.AbstractModule;
-import com.google.inject.Scopes;
-import com.google.inject.assistedinject.FactoryModuleBuilder;
-import com.google.inject.name.Names;
-import com.google.inject.persist.PersistModule;
-import com.google.inject.persist.jpa.AmbariJpaPersistModule;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_JDBC_DDL_FILE;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_ONLY;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_OR_EXTEND;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.DDL_BOTH_GENERATION;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.DDL_GENERATION;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.DDL_GENERATION_MODE;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.DROP_AND_CREATE;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.DROP_JDBC_DDL_FILE;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_DRIVER;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_PASSWORD;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_URL;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_USER;
+import static org.eclipse.persistence.config.PersistenceUnitProperties.THROW_EXCEPTIONS;
+
+import java.security.SecureRandom;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
 
 import org.apache.ambari.server.actionmanager.ActionDBAccessor;
 import org.apache.ambari.server.actionmanager.ActionDBAccessorImpl;
@@ -77,27 +87,22 @@ import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionImpl;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.apache.ambari.server.view.ViewInstanceHandlerList;
+import org.eclipse.jetty.server.SessionIdManager;
+import org.eclipse.jetty.server.SessionManager;
+import org.eclipse.jetty.server.session.HashSessionIdManager;
+import org.eclipse.jetty.server.session.HashSessionManager;
 import org.springframework.security.crypto.password.PasswordEncoder;
 import org.springframework.security.crypto.password.StandardPasswordEncoder;
+import org.springframework.web.filter.DelegatingFilterProxy;
 
-import java.security.SecureRandom;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-
-import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_JDBC_DDL_FILE;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_ONLY;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_OR_EXTEND;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.DDL_BOTH_GENERATION;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.DDL_GENERATION;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.DDL_GENERATION_MODE;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.DROP_AND_CREATE;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.DROP_JDBC_DDL_FILE;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_DRIVER;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_PASSWORD;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_URL;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.JDBC_USER;
-import static org.eclipse.persistence.config.PersistenceUnitProperties.THROW_EXCEPTIONS;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.inject.AbstractModule;
+import com.google.inject.Scopes;
+import com.google.inject.assistedinject.FactoryModuleBuilder;
+import com.google.inject.name.Names;
+import com.google.inject.persist.PersistModule;
+import com.google.inject.persist.jpa.AmbariJpaPersistModule;
 
 /**
  * Used for injection purposes.
@@ -168,9 +173,21 @@ public class ControllerModule extends AbstractModule {
   protected void configure() {
     installFactories();
 
+    final SessionIdManager sessionIdManager = new HashSessionIdManager();
+    final SessionManager sessionManager = new HashSessionManager();
+    sessionManager.setSessionPath("/");
+    sessionManager.setSessionIdManager(sessionIdManager);
+    bind(SessionManager.class).toInstance(sessionManager);
+    bind(SessionIdManager.class).toInstance(sessionIdManager);
+
     bind(Configuration.class).toInstance(configuration);
     bind(HostsMap.class).toInstance(hostsMap);
     bind(PasswordEncoder.class).toInstance(new StandardPasswordEncoder());
+    bind(DelegatingFilterProxy.class).toInstance(new DelegatingFilterProxy() {
+      {
+        setTargetBeanName("springSecurityFilterChain");
+      }
+    });
     bind(Gson.class).annotatedWith(Names.named("prettyGson")).toInstance(prettyGson);
 
     install(buildJpaPersistModule());

+ 20 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/FailsafeServletResponse.java

@@ -18,6 +18,8 @@
 package org.apache.ambari.server.controller;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
 
 import javax.servlet.http.HttpServletResponse;
 import javax.servlet.http.HttpServletResponseWrapper;
@@ -27,8 +29,16 @@ import javax.servlet.http.HttpServletResponseWrapper;
  * errors on failed requests.
  */
 public class FailsafeServletResponse extends HttpServletResponseWrapper {
+  /**
+   * Indicates that request failed.
+   */
   private boolean error;
 
+  /**
+   * List of errors which should not be consumed by fail-safe handler.
+   */
+  private List<Integer> allowedErrors = Arrays.asList(HttpServletResponse.SC_FORBIDDEN);
+
   /**
    * Constructor.
    *
@@ -40,12 +50,20 @@ public class FailsafeServletResponse extends HttpServletResponseWrapper {
 
   @Override
   public void sendError(int sc) throws IOException {
-    error = true;
+    if (allowedErrors.contains(sc)) {
+      super.sendError(sc);
+    } else {
+      error = true;
+    }
   }
 
   @Override
   public void sendError(int sc, String msg) throws IOException {
-    error = true;
+    if (allowedErrors.contains(sc)) {
+      super.sendError(sc, msg);
+    } else {
+      error = true;
+    }
   }
 
   /**

+ 13 - 9
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ControllerResourceProvider.java

@@ -23,7 +23,6 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
@@ -40,6 +39,9 @@ import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.security.ldap.LdapGroupDto;
+import org.apache.ambari.server.security.ldap.LdapSyncDto;
+import org.apache.ambari.server.security.ldap.LdapUserDto;
 import org.apache.commons.lang.StringUtils;
 
 /**
@@ -137,12 +139,14 @@ class ControllerResourceProvider extends AbstractControllerResourceProvider {
             ldapConfigured, requestedIds);
         if (ldapConfigured) {
           try {
+            final LdapSyncDto syncInfo = getManagementController().getLdapSyncInfo();
+
             final List<String> allUsers = new ArrayList<String>();
             final List<String> syncedUsers = new ArrayList<String>();
-            for (Entry<String, Boolean> user : getManagementController().getLdapUsersSyncInfo().entrySet()) {
-              allUsers.add(user.getKey());
-              if (user.getValue()) {
-                syncedUsers.add(user.getKey());
+            for (LdapUserDto user : syncInfo.getUsers()) {
+              allUsers.add(user.getUserName());
+              if (user.isSynced()) {
+                syncedUsers.add(user.getUserName());
               }
             }
             setResourceProperty(resource, CONTROLLER_LDAP_USERS_PROPERTY_ID,
@@ -151,10 +155,10 @@ class ControllerResourceProvider extends AbstractControllerResourceProvider {
                 syncedUsers, requestedIds);
             final List<String> allGroups = new ArrayList<String>();
             final List<String> syncedGroups = new ArrayList<String>();
-            for (Entry<String, Boolean> group : getManagementController().getLdapGroupsSyncInfo().entrySet()) {
-              allGroups.add(group.getKey());
-              if (group.getValue()) {
-                syncedGroups.add(group.getKey());
+            for (LdapGroupDto group : syncInfo.getGroups()) {
+              allGroups.add(group.getGroupName());
+              if (group.isSynced()) {
+                syncedGroups.add(group.getGroupName());
               }
             }
             setResourceProperty(resource, CONTROLLER_LDAP_GROUPS_PROPERTY_ID,

+ 5 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java

@@ -222,7 +222,7 @@ public abstract class PrivilegeResourceProvider<T> extends AbstractResourceProvi
 
       for(PrivilegeEntity privilegeEntity : entitySet){
         Resource resource = toResource(privilegeEntity, userEntities, groupEntities, resourceEntities, requestedIds);
-        if (predicate == null || predicate.evaluate(resource)) {
+        if (resource != null && (predicate == null || predicate.evaluate(resource))) {
           resources.add(resource);
         }
       }
@@ -441,6 +441,10 @@ public abstract class PrivilegeResourceProvider<T> extends AbstractResourceProvi
         if (resource == null) {
           // request body is empty, use predicate instead
           resource = getResourceEntityId(predicate);
+          // if the predicate does not identify a single resource or the resource is not available for update
+          if (resource == null) {
+            return null;
+          }
         }
         final List<PrivilegeEntity> currentPrivileges = privilegeDAO.findByResourceId(resource);
 

+ 7 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RecommendationResourceProvider.java

@@ -33,6 +33,7 @@ import javax.ws.rs.core.Response.Status;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequestException;
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse.BindingHostGroup;
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse.HostGroup;
@@ -90,10 +91,14 @@ public class RecommendationResourceProvider extends StackAdvisorResourceProvider
     final RecommendationResponse response;
     try {
       response = saHelper.recommend(recommendationRequest);
-    } catch (StackAdvisorException e) {
-      LOG.warn("Error occured during component-layout recommnedation", e);
+    } catch (StackAdvisorRequestException e) {
+      LOG.warn("Error occured during recommnedation", e);
       throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(e.getMessage())
           .build());
+    } catch (StackAdvisorException e) {
+      LOG.warn("Error occured during recommnedation", e);
+      throw new WebApplicationException(Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage())
+          .build());
     }
 
     Resource recommendation = createResources(new Command<Resource>() {

+ 7 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ValidationResourceProvider.java

@@ -33,6 +33,7 @@ import javax.ws.rs.core.Response.Status;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequestException;
 import org.apache.ambari.server.api.services.stackadvisor.validations.ValidationResponse;
 import org.apache.ambari.server.api.services.stackadvisor.validations.ValidationResponse.ValidationItem;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -82,10 +83,14 @@ public class ValidationResourceProvider extends StackAdvisorResourceProvider {
     final ValidationResponse response;
     try {
       response = saHelper.validate(validationRequest);
-    } catch (StackAdvisorException e) {
-      LOG.warn("Error occured during component-layout validation", e);
+    } catch (StackAdvisorRequestException e) {
+      LOG.warn("Error occurred during validation", e);
       throw new WebApplicationException(Response.status(Status.BAD_REQUEST).entity(e.getMessage())
           .build());
+    } catch (StackAdvisorException e) {
+      LOG.warn("Error occurred during validation", e);
+      throw new WebApplicationException(Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage())
+          .build());
     }
 
     Resource validation = createResources(new Command<Resource>() {

+ 32 - 12
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java

@@ -145,13 +145,16 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
       String instanceName = (String) propertyMap.get(INSTANCE_NAME_PROPERTY_ID);
 
       for (ViewEntity viewDefinition : viewRegistry.getDefinitions()){
-        if (viewName == null || viewName.equals(viewDefinition.getCommonName())) {
-          for (ViewInstanceEntity viewInstanceDefinition : viewRegistry.getInstanceDefinitions(viewDefinition)) {
-            if (instanceName == null || instanceName.equals(viewInstanceDefinition.getName())) {
-              if (viewVersion == null || viewVersion.equals(viewDefinition.getVersion())) {
-                if (includeInstance(viewInstanceDefinition, true)) {
-                  Resource resource = toResource(viewInstanceDefinition, requestedIds);
-                  resources.add(resource);
+        // do not report instances for views that are not loaded.
+        if (viewDefinition.isLoaded()){
+          if (viewName == null || viewName.equals(viewDefinition.getCommonName())) {
+            for (ViewInstanceEntity viewInstanceDefinition : viewRegistry.getInstanceDefinitions(viewDefinition)) {
+              if (instanceName == null || instanceName.equals(viewInstanceDefinition.getName())) {
+                if (viewVersion == null || viewVersion.equals(viewDefinition.getVersion())) {
+                  if (includeInstance(viewInstanceDefinition, true)) {
+                    Resource resource = toResource(viewInstanceDefinition, requestedIds);
+                    resources.add(resource);
+                  }
                 }
               }
             }
@@ -336,6 +339,20 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
           ViewRegistry       viewRegistry   = ViewRegistry.getInstance();
           ViewInstanceEntity instanceEntity = toEntity(properties);
 
+          ViewEntity viewEntity = instanceEntity.getViewEntity();
+          String     viewName   = viewEntity.getCommonName();
+          String     version    = viewEntity.getVersion();
+          ViewEntity view       = viewRegistry.getDefinition(viewName, version);
+
+          if ( view == null ) {
+            throw new IllegalStateException("The view " + viewName + " is not registered.");
+          }
+
+          // the view must be in the LOADED state to create an instance
+          if (!view.isLoaded()) {
+            throw new IllegalStateException("The view " + viewName + " is not loaded.");
+          }
+
           if (viewRegistry.instanceExists(instanceEntity)) {
             throw new DuplicateResourceException("The instance " + instanceEntity.getName() + " already exists.");
           }
@@ -376,11 +393,14 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
         Set<ViewInstanceEntity> viewInstanceEntities = new HashSet<ViewInstanceEntity>();
 
         for (ViewEntity viewEntity : viewRegistry.getDefinitions()){
-          for (ViewInstanceEntity viewInstanceEntity : viewRegistry.getInstanceDefinitions(viewEntity)){
-            Resource resource = toResource(viewInstanceEntity, requestedIds);
-            if (predicate == null || predicate.evaluate(resource)) {
-              if (includeInstance(viewInstanceEntity, false)) {
-                viewInstanceEntities.add(viewInstanceEntity);
+          // the view must be in the LOADED state to delete an instance
+          if (viewEntity.isLoaded()) {
+            for (ViewInstanceEntity viewInstanceEntity : viewRegistry.getInstanceDefinitions(viewEntity)){
+              Resource resource = toResource(viewInstanceEntity, requestedIds);
+              if (predicate == null || predicate.evaluate(resource)) {
+                if (includeInstance(viewInstanceEntity, false)) {
+                  viewInstanceEntities.add(viewInstanceEntity);
+                }
               }
             }
           }

+ 6 - 2
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProvider.java

@@ -125,7 +125,11 @@ public class ViewPermissionResourceProvider extends AbstractResourceProvider {
       Object viewVersion = propertyMap.get(VIEW_VERSION_PROPERTY_ID);
       if (viewName != null && viewVersion != null) {
         ViewEntity viewEntity = viewRegistry.getDefinition(viewName.toString(), viewVersion.toString());
-        resources.add(toResource(viewUsePermission, viewEntity.getResourceType(), viewEntity, requestedIds));
+
+        // do not report permissions for views that are not loaded.
+        if (viewEntity.isLoaded()) {
+          resources.add(toResource(viewUsePermission, viewEntity.getResourceType(), viewEntity, requestedIds));
+        }
       }
     }
 
@@ -134,7 +138,7 @@ public class ViewPermissionResourceProvider extends AbstractResourceProvider {
 
       ViewEntity viewEntity = viewRegistry.getDefinition(resourceType);
 
-      if (viewEntity != null) {
+      if (viewEntity != null && viewEntity.isLoaded()) {
         resources.add(toResource(permissionEntity, resourceType, viewEntity, requestedIds));
       }
     }

+ 24 - 4
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProvider.java

@@ -115,7 +115,12 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
       if (viewInstanceEntity == null) {
         throw new AmbariException("View instance " + instanceName + " of " + viewName + viewVersion + " was not found");
       }
-      return Collections.singletonMap(viewInstanceEntity.getResource().getId(), viewInstanceEntity);
+
+      ViewEntity view = viewInstanceEntity.getViewEntity();
+
+      return view.isLoaded() ?
+          Collections.singletonMap(viewInstanceEntity.getResource().getId(), viewInstanceEntity) :
+          Collections.<Long, ViewInstanceEntity>emptyMap();
     }
 
     Set<ViewEntity> viewEntities = new HashSet<ViewEntity>();
@@ -136,8 +141,10 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
     Map<Long, ViewInstanceEntity> resourceEntities = new HashMap<Long, ViewInstanceEntity>();
 
     for (ViewEntity viewEntity : viewEntities) {
-      for (ViewInstanceEntity viewInstanceEntity : viewEntity.getInstances()) {
-        resourceEntities.put(viewInstanceEntity.getResource().getId(), viewInstanceEntity);
+      if (viewEntity.isLoaded()) {
+        for (ViewInstanceEntity viewInstanceEntity : viewEntity.getInstances()) {
+          resourceEntities.put(viewInstanceEntity.getResource().getId(), viewInstanceEntity);
+        }
       }
     }
     return resourceEntities;
@@ -150,10 +157,19 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
     final String viewName     = getQueryParameterValue(PRIVILEGE_VIEW_NAME_PROPERTY_ID, predicate).toString();
     final String viewVersion  = getQueryParameterValue(PRIVILEGE_VIEW_VERSION_PROPERTY_ID, predicate).toString();
     final String instanceName = getQueryParameterValue(PRIVILEGE_INSTANCE_NAME_PROPERTY_ID, predicate).toString();
+
     final ViewInstanceEntity viewInstanceEntity = viewRegistry.getInstanceDefinition(viewName, viewVersion, instanceName);
-    return viewInstanceEntity.getResource().getId();
+
+    if (viewInstanceEntity != null) {
+
+      ViewEntity view = viewInstanceEntity.getViewEntity();
+
+      return view.isLoaded() ? viewInstanceEntity.getResource().getId() : null;
+    }
+    return null;
   }
 
+
   // ----- helper methods ----------------------------------------------------
 
   @Override
@@ -173,6 +189,10 @@ public class ViewPrivilegeResourceProvider extends PrivilegeResourceProvider<Vie
       ViewInstanceEntity viewInstanceEntity = resourceEntities.get(privilegeEntity.getResource().getId());
       ViewEntity         viewEntity         = viewInstanceEntity.getViewEntity();
 
+      if (!viewEntity.isLoaded()) {
+        return null;
+      }
+
       setResourceProperty(resource, PRIVILEGE_VIEW_NAME_PROPERTY_ID, viewEntity.getCommonName(), requestedIds);
       setResourceProperty(resource, PRIVILEGE_VIEW_VERSION_PROPERTY_ID, viewEntity.getVersion(), requestedIds);
       setResourceProperty(resource, PRIVILEGE_INSTANCE_NAME_PROPERTY_ID, viewInstanceEntity.getName(), requestedIds);

+ 3 - 3
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewVersionResourceProvider.java

@@ -98,7 +98,7 @@ public class ViewVersionResourceProvider extends AbstractResourceProvider {
   public RequestStatus createResources(Request request)
       throws SystemException, UnsupportedPropertyException,
       ResourceAlreadyExistsException, NoSuchParentResourceException {
-    throw new UnsupportedOperationException("Not yet supported.");
+    throw new UnsupportedOperationException("Not supported.");
   }
 
   @Override
@@ -147,13 +147,13 @@ public class ViewVersionResourceProvider extends AbstractResourceProvider {
   @Override
   public RequestStatus updateResources(Request request, Predicate predicate)
       throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
-    throw new UnsupportedOperationException("Not yet supported.");
+    throw new UnsupportedOperationException("Not supported.");
   }
 
   @Override
   public RequestStatus deleteResources(Predicate predicate)
       throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
-    throw new UnsupportedOperationException("Not yet supported.");
+    throw new UnsupportedOperationException("Not supported.");
   }
 
   @Override

+ 28 - 2
ambari-server/src/main/java/org/apache/ambari/server/orm/dao/GroupDAO.java

@@ -17,8 +17,11 @@
  */
 package org.apache.ambari.server.orm.dao;
 
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import javax.persistence.EntityManager;
 import javax.persistence.NoResultException;
@@ -30,6 +33,7 @@ import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
+
 import org.apache.ambari.server.orm.entities.PrincipalEntity;
 
 @Singleton
@@ -79,8 +83,15 @@ public class GroupDAO {
 
   @Transactional
   public void create(GroupEntity group) {
-    group.setGroupName(group.getGroupName().toLowerCase());
-    entityManagerProvider.get().persist(group);
+    create(new HashSet<GroupEntity>(Arrays.asList(group)));
+  }
+
+  @Transactional
+  public void create(Set<GroupEntity> groups) {
+    for (GroupEntity group: groups) {
+      group.setGroupName(group.getGroupName().toLowerCase());
+      entityManagerProvider.get().persist(group);
+    }
   }
 
   @Transactional
@@ -89,12 +100,27 @@ public class GroupDAO {
     return entityManagerProvider.get().merge(group);
   }
 
+  @Transactional
+  public void merge(Set<GroupEntity> groups) {
+    for (GroupEntity group: groups) {
+      group.setGroupName(group.getGroupName().toLowerCase());
+      entityManagerProvider.get().merge(group);
+    }
+  }
+
   @Transactional
   public void remove(GroupEntity group) {
     entityManagerProvider.get().remove(merge(group));
     entityManagerProvider.get().getEntityManagerFactory().getCache().evictAll();
   }
 
+  @Transactional
+  public void remove(Set<GroupEntity> groups) {
+    for (GroupEntity groupEntity: groups) {
+      entityManagerProvider.get().remove(entityManagerProvider.get().merge(groupEntity));
+    }
+  }
+
   @Transactional
   public void removeByPK(Integer groupPK) {
     remove(findByPK(groupPK));

+ 32 - 2
ambari-server/src/main/java/org/apache/ambari/server/orm/dao/MemberDAO.java

@@ -17,19 +17,23 @@
  */
 package org.apache.ambari.server.orm.dao;
 
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import javax.persistence.EntityManager;
+import javax.persistence.NoResultException;
 import javax.persistence.TypedQuery;
 
 import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.entities.MemberEntity;
+import org.apache.ambari.server.orm.entities.UserEntity;
 
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
-import org.apache.ambari.server.orm.entities.UserEntity;
 
 @Singleton
 public class MemberDAO {
@@ -43,6 +47,18 @@ public class MemberDAO {
     return entityManagerProvider.get().find(MemberEntity.class, memberPK);
   }
 
+  @RequiresSession
+  public MemberEntity findByUserAndGroup(String userName, String groupName) {
+    final TypedQuery<MemberEntity> query = entityManagerProvider.get().createNamedQuery("memberByUserAndGroup", MemberEntity.class);
+    query.setParameter("username", userName.toLowerCase());
+    query.setParameter("groupname", groupName.toLowerCase());
+    try {
+      return query.getSingleResult();
+    } catch (NoResultException e) {
+      return null;
+    }
+  }
+
   @RequiresSession
   public List<MemberEntity> findAll() {
     final TypedQuery<MemberEntity> query = entityManagerProvider.get().createQuery("SELECT m FROM MemberEntity m", MemberEntity.class);
@@ -57,7 +73,14 @@ public class MemberDAO {
 
   @Transactional
   public void create(MemberEntity member) {
-    entityManagerProvider.get().persist(member);
+    create(new HashSet<MemberEntity>(Arrays.asList(member)));
+  }
+
+  @Transactional
+  public void create(Set<MemberEntity> members) {
+    for (MemberEntity member: members) {
+      entityManagerProvider.get().persist(member);
+    }
   }
 
   @Transactional
@@ -70,6 +93,13 @@ public class MemberDAO {
     entityManagerProvider.get().remove(merge(member));
   }
 
+  @Transactional
+  public void remove(Set<MemberEntity> members) {
+    for (MemberEntity member: members) {
+      entityManagerProvider.get().remove(entityManagerProvider.get().merge(member));
+    }
+  }
+
   @Transactional
   public void removeByPK(Integer memberPK) {
     remove(findByPK(memberPK));

+ 20 - 7
ambari-server/src/main/java/org/apache/ambari/server/orm/dao/PrincipalDAO.java

@@ -18,17 +18,18 @@
 
 package org.apache.ambari.server.orm.dao;
 
+import java.util.Arrays;
+import java.util.List;
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+
+import org.apache.ambari.server.orm.entities.PrincipalEntity;
+
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
 
-import org.apache.ambari.server.orm.entities.PrincipalEntity;
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-
-import java.util.List;
-
 /**
  * Principal Data Access Object.
  */
@@ -82,7 +83,19 @@ public class PrincipalDAO {
    */
   @Transactional
   public void create(PrincipalEntity entity) {
-    entityManagerProvider.get().persist(entity);
+    create(Arrays.asList(entity));
+  }
+
+  /**
+   * Make instances managed and persistent.
+   *
+   * @param entities entities to store
+   */
+  @Transactional
+  public void create(List<PrincipalEntity> entities) {
+    for (PrincipalEntity entity: entities) {
+      entityManagerProvider.get().persist(entity);
+    }
   }
 
   /**

+ 28 - 1
ambari-server/src/main/java/org/apache/ambari/server/orm/dao/PrincipalTypeDAO.java

@@ -22,10 +22,11 @@ import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
-import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
 
+import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
+
 import java.util.List;
 
 /**
@@ -80,5 +81,31 @@ public class PrincipalTypeDAO {
   public PrincipalTypeEntity merge(PrincipalTypeEntity entity) {
     return entityManagerProvider.get().merge(entity);
   }
+
+  /**
+   * Returns the principal type with the given id, creating and persisting it first if it does not exist yet.
+   *
+   * @param principalType the principal type id (user or group)
+   * @return the existing or newly created principal type entity
+   */
+  public PrincipalTypeEntity ensurePrincipalTypeCreated(int principalType) {
+    PrincipalTypeEntity principalTypeEntity = findById(principalType);
+    if (principalTypeEntity == null) {
+      principalTypeEntity = new PrincipalTypeEntity();
+      principalTypeEntity.setId(principalType);
+      switch (principalType) {
+        case PrincipalTypeEntity.USER_PRINCIPAL_TYPE:
+          principalTypeEntity.setName(PrincipalTypeEntity.USER_PRINCIPAL_TYPE_NAME);
+          break;
+        case PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE:
+          principalTypeEntity.setName(PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE_NAME);
+          break;
+        default:
+          throw new IllegalArgumentException("Unknown principal type ID=" + principalType);
+      }
+      create(principalTypeEntity);
+    }
+    return principalTypeEntity;
+  }
 }
 

+ 38 - 11
ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UserDAO.java

@@ -17,19 +17,24 @@
  */
 package org.apache.ambari.server.orm.dao;
 
-import com.google.inject.Inject;
-import com.google.inject.Provider;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
-import org.apache.ambari.server.orm.RequiresSession;
-import org.apache.ambari.server.orm.entities.PrincipalEntity;
-import org.apache.ambari.server.orm.entities.UserEntity;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
 import javax.persistence.EntityManager;
 import javax.persistence.NoResultException;
 import javax.persistence.TypedQuery;
-import java.util.Collections;
-import java.util.List;
+
+import org.apache.ambari.server.orm.RequiresSession;
+import org.apache.ambari.server.orm.entities.PrincipalEntity;
+import org.apache.ambari.server.orm.entities.UserEntity;
+
+import com.google.inject.Inject;
+import com.google.inject.Provider;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
 
 @Singleton
 public class UserDAO {
@@ -90,8 +95,15 @@ public class UserDAO {
 
   @Transactional
   public void create(UserEntity user) {
-    user.setUserName(user.getUserName().toLowerCase());
-    entityManagerProvider.get().persist(user);
+    create(new HashSet<UserEntity>(Arrays.asList(user)));
+  }
+
+  @Transactional
+  public void create(Set<UserEntity> users) {
+    for (UserEntity user: users) {
+      user.setUserName(user.getUserName().toLowerCase());
+      entityManagerProvider.get().persist(user);
+    }
   }
 
   @Transactional
@@ -100,12 +112,27 @@ public class UserDAO {
     return entityManagerProvider.get().merge(user);
   }
 
+  @Transactional
+  public void merge(Set<UserEntity> users) {
+    for (UserEntity user: users) {
+      user.setUserName(user.getUserName().toLowerCase());
+      entityManagerProvider.get().merge(user);
+    }
+  }
+
   @Transactional
   public void remove(UserEntity user) {
     entityManagerProvider.get().remove(merge(user));
     entityManagerProvider.get().getEntityManagerFactory().getCache().evictAll();
   }
 
+  @Transactional
+  public void remove(Set<UserEntity> users) {
+    for (UserEntity userEntity: users) {
+      entityManagerProvider.get().remove(entityManagerProvider.get().merge(userEntity));
+    }
+  }
+
   @Transactional
   public void removeByPK(Integer userPK) {
     remove(findByPK(userPK));

+ 6 - 1
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java

@@ -24,19 +24,24 @@ import javax.persistence.GenerationType;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
 import javax.persistence.ManyToOne;
+import javax.persistence.NamedQueries;
+import javax.persistence.NamedQuery;
 import javax.persistence.Table;
 import javax.persistence.TableGenerator;
 import javax.persistence.UniqueConstraint;
 
 @Entity
 @Table(name = "members", uniqueConstraints = {@UniqueConstraint(columnNames = {"group_id", "user_id"})})
+@NamedQueries({
+  @NamedQuery(name = "memberByUserAndGroup", query = "SELECT memberEnt FROM MemberEntity memberEnt where lower(memberEnt.user.userName)=:username AND lower(memberEnt.group.groupName)=:groupname")
+})
 @TableGenerator(name = "member_id_generator",
     table = "ambari_sequences",
     pkColumnName = "sequence_name",
     valueColumnName = "sequence_value",
     pkColumnValue = "member_id_seq",
     initialValue = 1,
-    allocationSize = 1
+    allocationSize = 500
     )
 public class MemberEntity {
   @Id

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java

@@ -44,7 +44,7 @@ import javax.persistence.TableGenerator;
     table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
     , pkColumnValue = "principal_id_seq"
     , initialValue = 2
-    , allocationSize = 1
+    , allocationSize = 500
 )
 @NamedQueries({
   @NamedQuery(name = "principalByPrivilegeId", query = "SELECT principal FROM PrincipalEntity principal JOIN principal.privileges privilege WHERE privilege.permission.id=:permission_id")

+ 3 - 2
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java

@@ -20,6 +20,7 @@ package org.apache.ambari.server.orm.entities;
 import javax.persistence.*;
 
 import java.util.Date;
+import java.util.HashSet;
 import java.util.Set;
 
 @Table(name = "users", uniqueConstraints = {@UniqueConstraint(columnNames = {"user_name", "ldap_user"})})
@@ -32,7 +33,7 @@ import java.util.Set;
     table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
     , pkColumnValue = "user_id_seq"
     , initialValue = 2
-    , allocationSize = 1
+    , allocationSize = 500
     )
 public class UserEntity {
 
@@ -60,7 +61,7 @@ public class UserEntity {
   private Integer active = 1;
 
   @OneToMany(mappedBy = "user", cascade = CascadeType.ALL)
-  private Set<MemberEntity> memberEntities;
+  private Set<MemberEntity> memberEntities = new HashSet<MemberEntity>();
 
   @OneToOne
   @JoinColumns({

+ 9 - 0
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntity.java

@@ -749,6 +749,15 @@ public class ViewEntity implements ViewDefinition {
     this.statusDetail = statusDetail;
   }
 
+  /**
+   * Determine whether or not the entity is loaded.
+   *
+   * @return true if the entity is loaded
+   */
+  public boolean isLoaded() {
+    return status.equals(ViewStatus.LOADED);
+  }
+
   /**
    * Get the internal view name from the given common name and version.
    *

+ 89 - 0
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java

@@ -23,6 +23,8 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import javax.persistence.Basic;
 import javax.persistence.CascadeType;
@@ -73,6 +75,11 @@ public class ViewInstanceEntity implements ViewInstanceDefinition {
    */
   public static final String VIEWS_CONTEXT_PATH_PREFIX = "/views/";
 
+  /**
+   * The pattern for matching view instance context path.
+   */
+  public static final String VIEWS_CONTEXT_PATH_PATTERN = "" + VIEWS_CONTEXT_PATH_PREFIX + "([^/]+)/([^/]+)/([^/]+)(.*)";
+
   @Id
   @Column(name = "view_instance_id", nullable = false)
   @GeneratedValue(strategy = GenerationType.TABLE, generator = "view_instance_id_generator")
@@ -676,6 +683,25 @@ public class ViewInstanceEntity implements ViewInstanceDefinition {
     return VIEWS_CONTEXT_PATH_PREFIX + viewName + "/" + version + "/" + viewInstanceName;
   }
 
+  /**
+   * Parses a view context path into its view name, version and instance name parts.
+   *
+   * @param contextPath the context path to parse
+   * @return the parsed ViewInstanceVersionDTO, or null if the context path doesn't match the expected pattern
+   */
+  public static ViewInstanceVersionDTO parseContextPath(String contextPath) {
+    final Pattern pattern = Pattern.compile(VIEWS_CONTEXT_PATH_PATTERN);
+    Matcher matcher = pattern.matcher(contextPath);
+    if (!matcher.matches()) {
+      return null;
+    } else {
+      final String viewName = matcher.group(1);
+      final String version = matcher.group(2);
+      final String instanceName = matcher.group(3);
+      return new ViewInstanceVersionDTO(viewName, version, instanceName);
+    }
+  }
+
   /**
    * Get the current user name.
    *
@@ -768,4 +794,67 @@ public class ViewInstanceEntity implements ViewInstanceDefinition {
     result = 31 * result + name.hashCode();
     return result;
   }
+
+  //----- ViewInstanceVersionDTO inner class --------------------------------------------------
+
+  /**
+   * Keeps information about view name, version and instance name.
+   */
+  public static class ViewInstanceVersionDTO {
+
+    /**
+     * View name.
+     */
+    private final String viewName;
+
+    /**
+     * View version.
+     */
+    private final String version;
+
+    /**
+     * View instance name.
+     */
+    private final String instanceName;
+
+    /**
+     * Constructor.
+     *
+     * @param viewName view name
+     * @param version view version
+     * @param instanceName view instance name
+     */
+    public ViewInstanceVersionDTO(String viewName, String version, String instanceName) {
+      this.viewName = viewName;
+      this.version = version;
+      this.instanceName = instanceName;
+    }
+
+    /**
+     * Get the view name.
+     *
+     * @return the view name
+     */
+    public String getViewName() {
+      return viewName;
+    }
+
+    /**
+     * Get the view version.
+     *
+     * @return the view version
+     */
+    public String getVersion() {
+      return version;
+    }
+
+    /**
+     * Get the view instance name.
+     *
+     * @return the view instance name
+     */
+    public String getInstanceName() {
+      return instanceName;
+    }
+  }
 }

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/security/SecurityFilter.java

@@ -35,7 +35,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 public class SecurityFilter implements Filter {
-  
+
  //Allowed paths for one-way auth https
   private static String CA = "/ca";
 

+ 26 - 9
ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java

@@ -18,19 +18,28 @@
 
 package org.apache.ambari.server.security.authorization;
 
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
+import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
+import org.apache.ambari.server.orm.entities.ViewInstanceEntity.ViewInstanceVersionDTO;
 import org.apache.ambari.server.security.authorization.internal.InternalAuthenticationToken;
+import org.apache.ambari.server.view.ViewRegistry;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.GrantedAuthority;
 import org.springframework.security.core.context.SecurityContext;
 import org.springframework.security.core.context.SecurityContextHolder;
 
-import javax.servlet.*;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
 public class AmbariAuthorizationFilter implements Filter {
 
   private static final String REALM_PARAM = "realm";
@@ -43,7 +52,6 @@ public class AmbariAuthorizationFilter implements Filter {
    */
   private String realm;
 
-
   @Override
   public void init(FilterConfig filterConfig) throws ServletException {
     realm = getParameterValue(filterConfig, REALM_PARAM, DEFAULT_REALM);
@@ -51,9 +59,11 @@ public class AmbariAuthorizationFilter implements Filter {
 
   @Override
   public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest) request;
+    HttpServletRequest  httpRequest  = (HttpServletRequest) request;
     HttpServletResponse httpResponse = (HttpServletResponse) response;
 
+    String requestURI = httpRequest.getRequestURI();
+
     SecurityContext context = getSecurityContext();
 
     Authentication authentication = context.getAuthentication();
@@ -71,7 +81,6 @@ public class AmbariAuthorizationFilter implements Filter {
           AmbariGrantedAuthority ambariGrantedAuthority = (AmbariGrantedAuthority) grantedAuthority;
 
           PrivilegeEntity privilegeEntity = ambariGrantedAuthority.getPrivilegeEntity();
-          String          requestURI      = httpRequest.getRequestURI();
           Integer         permissionId    = privilegeEntity.getPermission().getId();
 
           // admin has full access
@@ -101,7 +110,15 @@ public class AmbariAuthorizationFilter implements Filter {
           }
         }
       }
-      if (!authorized && !httpRequest.getMethod().equals("GET")) {
+
+      if (!authorized && requestURI.matches(ViewInstanceEntity.VIEWS_CONTEXT_PATH_PATTERN)) {
+        final ViewInstanceVersionDTO dto = ViewInstanceEntity.parseContextPath(requestURI);
+        authorized = ViewRegistry.getInstance().checkPermission(dto.getViewName(), dto.getVersion(), dto.getInstanceName(), true);
+      }
+
+      // allow GET for everything except views
+      if (!authorized &&
+          (!httpRequest.getMethod().equals("GET") || requestURI.matches("/views.*"))) {
 
         httpResponse.setHeader("WWW-Authenticate", "Basic realm=\"" + realm + "\"");
         httpResponse.sendError(HttpServletResponse.SC_FORBIDDEN, "You do not have permissions to access this resource.");

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/security/authorization/User.java

@@ -38,7 +38,7 @@ public class User {
   final Collection<String> groups = new ArrayList<String>();
   boolean admin = false;
 
-  User(UserEntity userEntity) {
+  public User(UserEntity userEntity) {
     userId = userEntity.getUserId();
     userName = userEntity.getUserName();
     createTime = userEntity.getCreateTime();

+ 156 - 2
ambari-server/src/main/java/org/apache/ambari/server/security/authorization/Users.java

@@ -19,10 +19,14 @@ package org.apache.ambari.server.security.authorization;
 
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.dao.GroupDAO;
@@ -40,6 +44,8 @@ import org.apache.ambari.server.orm.entities.PrincipalEntity;
 import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
+import org.apache.ambari.server.security.ldap.LdapBatchDto;
+import org.apache.ambari.server.security.ldap.LdapUserGroupMemberDto;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.authentication.BadCredentialsException;
@@ -49,6 +55,7 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.crypto.password.PasswordEncoder;
 
 import com.google.inject.Inject;
+import com.google.inject.Provider;
 import com.google.inject.Singleton;
 import com.google.inject.persist.Transactional;
 
@@ -60,6 +67,8 @@ public class Users {
 
   private final static Logger LOG = LoggerFactory.getLogger(Users.class);
 
+  @Inject
+  Provider<EntityManager> entityManagerProvider;
   @Inject
   protected UserDAO userDAO;
   @Inject
@@ -227,7 +236,7 @@ public class Users {
    * Creates new local user with provided userName and password.
    */
   public void createUser(String userName, String password) {
-    createUser(userName, password, true, false);
+    createUser(userName, password, true, false, false);
   }
 
   /**
@@ -237,9 +246,10 @@ public class Users {
    * @param password password
    * @param active is user active
    * @param admin is user admin
+   * @param ldapUser is user LDAP
    */
   @Transactional
-  public synchronized void createUser(String userName, String password, Boolean active, Boolean admin) {
+  public synchronized void createUser(String userName, String password, Boolean active, Boolean admin, Boolean ldapUser) {
 
     // create an admin principal to represent this user
     PrincipalTypeEntity principalTypeEntity = principalTypeDAO.findById(PrincipalTypeEntity.USER_PRINCIPAL_TYPE);
@@ -260,6 +270,9 @@ public class Users {
     if (active != null) {
       userEntity.setActive(active);
     }
+    if (ldapUser != null) {
+      userEntity.setLdapUser(ldapUser);
+    }
 
     userDAO.create(userEntity);
 
@@ -510,4 +523,145 @@ public class Users {
     return false;
   }
 
+  /**
+   * Applies an LDAP synchronization batch (user/group/membership creations, updates and removals) to the database in bulk.
+   *
+   * @param batchInfo DTO describing the batch of changes to apply
+   */
+  public void processLdapSync(LdapBatchDto batchInfo) {
+    final Map<String, UserEntity> allUsers = new HashMap<String, UserEntity>();
+    final Map<String, GroupEntity> allGroups = new HashMap<String, GroupEntity>();
+
+    // prefetch all user and group data to avoid heavy queries in membership creation
+
+    for (UserEntity userEntity: userDAO.findAll()) {
+      allUsers.put(userEntity.getUserName(), userEntity);
+    }
+
+    for (GroupEntity groupEntity: groupDAO.findAll()) {
+      allGroups.put(groupEntity.getGroupName(), groupEntity);
+    }
+
+    final PrincipalTypeEntity userPrincipalType = principalTypeDAO
+        .ensurePrincipalTypeCreated(PrincipalTypeEntity.USER_PRINCIPAL_TYPE);
+    final PrincipalTypeEntity groupPrincipalType = principalTypeDAO
+        .ensurePrincipalTypeCreated(PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE);
+
+    // remove users
+    final Set<UserEntity> usersToRemove = new HashSet<UserEntity>();
+    for (String userName: batchInfo.getUsersToBeRemoved()) {
+      UserEntity userEntity = userDAO.findLocalUserByName(userName);
+      if (userEntity == null) {
+        userEntity = userDAO.findLdapUserByName(userName);
+        if (userEntity == null) {
+          continue;
+        }
+      }
+      allUsers.remove(userEntity.getUserName());
+      usersToRemove.add(userEntity);
+    }
+    userDAO.remove(usersToRemove);
+
+    // remove groups
+    final Set<GroupEntity> groupsToRemove = new HashSet<GroupEntity>();
+    for (String groupName: batchInfo.getGroupsToBeRemoved()) {
+      final GroupEntity groupEntity = groupDAO.findGroupByName(groupName);
+      allGroups.remove(groupEntity.getGroupName());
+      groupsToRemove.add(groupEntity);
+    }
+    groupDAO.remove(groupsToRemove);
+
+    // update users
+    final Set<UserEntity> usersToBecomeLdap = new HashSet<UserEntity>();
+    for (String userName: batchInfo.getUsersToBecomeLdap()) {
+      UserEntity userEntity = userDAO.findLocalUserByName(userName);
+      if (userEntity == null) {
+        userEntity = userDAO.findLdapUserByName(userName);
+        if (userEntity == null) {
+          continue;
+        }
+      }
+      userEntity.setLdapUser(true);
+      allUsers.put(userEntity.getUserName(), userEntity);
+      usersToBecomeLdap.add(userEntity);
+    }
+    userDAO.merge(usersToBecomeLdap);
+
+    // update groups
+    final Set<GroupEntity> groupsToBecomeLdap = new HashSet<GroupEntity>();
+    for (String groupName: batchInfo.getGroupsToBecomeLdap()) {
+      final GroupEntity groupEntity = groupDAO.findGroupByName(groupName);
+      groupEntity.setLdapGroup(true);
+      allGroups.put(groupEntity.getGroupName(), groupEntity);
+      groupsToBecomeLdap.add(groupEntity);
+    }
+    groupDAO.merge(groupsToBecomeLdap);
+
+    // prepare create principals
+    final List<PrincipalEntity> principalsToCreate = new ArrayList<PrincipalEntity>();
+
+    // prepare create users
+    final Set<UserEntity> usersToCreate = new HashSet<UserEntity>();
+    for (String userName: batchInfo.getUsersToBeCreated()) {
+      final PrincipalEntity principalEntity = new PrincipalEntity();
+      principalEntity.setPrincipalType(userPrincipalType);
+      principalsToCreate.add(principalEntity);
+
+      final UserEntity userEntity = new UserEntity();
+      userEntity.setUserName(userName);
+      userEntity.setUserPassword("");
+      userEntity.setPrincipal(principalEntity);
+      userEntity.setLdapUser(true);
+
+      allUsers.put(userEntity.getUserName(), userEntity);
+      usersToCreate.add(userEntity);
+    }
+
+    // prepare create groups
+    final Set<GroupEntity> groupsToCreate = new HashSet<GroupEntity>();
+    for (String groupName: batchInfo.getGroupsToBeCreated()) {
+      final PrincipalEntity principalEntity = new PrincipalEntity();
+      principalEntity.setPrincipalType(groupPrincipalType);
+      principalsToCreate.add(principalEntity);
+
+      final GroupEntity groupEntity = new GroupEntity();
+      groupEntity.setGroupName(groupName);
+      groupEntity.setPrincipal(principalEntity);
+      groupEntity.setLdapGroup(true);
+
+      allGroups.put(groupEntity.getGroupName(), groupEntity);
+      groupsToCreate.add(groupEntity);
+    }
+
+    // create users and groups
+    principalDAO.create(principalsToCreate);
+    userDAO.create(usersToCreate);
+    groupDAO.create(groupsToCreate);
+
+    // remove membership
+    final Set<MemberEntity> membersToRemove = new HashSet<MemberEntity>();
+    for (LdapUserGroupMemberDto member: batchInfo.getMembershipToRemove()) {
+      membersToRemove.add(memberDAO.findByUserAndGroup(member.getUserName(), member.getGroupName()));
+    }
+    memberDAO.remove(membersToRemove);
+
+    // create membership
+    final Set<MemberEntity> membersToCreate = new HashSet<MemberEntity>();
+    final Set<GroupEntity> groupsToUpdate = new HashSet<GroupEntity>();
+    for (LdapUserGroupMemberDto member: batchInfo.getMembershipToAdd()) {
+      final MemberEntity memberEntity = new MemberEntity();
+      final GroupEntity groupEntity = allGroups.get(member.getGroupName());
+      memberEntity.setGroup(groupEntity);
+      memberEntity.setUser(allUsers.get(member.getUserName()));
+      groupEntity.getMemberEntities().add(memberEntity);
+      groupsToUpdate.add(groupEntity);
+      membersToCreate.add(memberEntity);
+    }
+    memberDAO.create(membersToCreate);
+    groupDAO.merge(groupsToUpdate); // needed for Derby DB as it doesn't fetch newly added members automatically
+
+    // clear cached entities
+    entityManagerProvider.get().getEntityManagerFactory().getCache().evictAll();
+  }
+
 }

+ 127 - 106
ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java → ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java

@@ -15,9 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.ambari.server.security.authorization;
+package org.apache.ambari.server.security.ldap;
 
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -30,6 +31,10 @@ import javax.naming.directory.Attributes;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.security.authorization.Group;
+import org.apache.ambari.server.security.authorization.LdapServerProperties;
+import org.apache.ambari.server.security.authorization.User;
+import org.apache.ambari.server.security.authorization.Users;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.springframework.ldap.core.AttributesMapper;
@@ -37,7 +42,6 @@ import org.springframework.ldap.core.ContextMapper;
 import org.springframework.ldap.core.DirContextAdapter;
 import org.springframework.ldap.core.LdapTemplate;
 import org.springframework.ldap.core.support.LdapContextSource;
-import org.springframework.ldap.filter.AndFilter;
 import org.springframework.ldap.filter.EqualsFilter;
 import org.springframework.security.core.userdetails.UsernameNotFoundException;
 
@@ -89,7 +93,13 @@ public class AmbariLdapDataPopulator {
     }
     try {
       final LdapTemplate ldapTemplate = loadLdapTemplate();
-      ldapTemplate.list(ldapServerProperties.getBaseDN());
+      ldapTemplate.search(ldapServerProperties.getBaseDN(), "uid=dummy_search", new AttributesMapper() {
+
+        @Override
+        public Object mapFromAttributes(Attributes arg0) throws NamingException {
+          return null;
+        }
+      });
       return true;
     } catch (Exception ex) {
       LOG.error("Could not connect to LDAP server - " + ex.getMessage());
@@ -98,49 +108,38 @@ public class AmbariLdapDataPopulator {
   }
 
   /**
-   * Retrieves a key-value map of all LDAP groups.
+   * Retrieves information about external groups and users and their synced/unsynced state.
    *
-   * @return map of GroupName-Synced pairs
+   * @return DTO describing external LDAP users/groups and their synced state
    */
-  public Map<String, Boolean> getLdapGroupsSyncInfo() {
-    final Map<String, Boolean> ldapGroups = new HashMap<String, Boolean>();
+  public LdapSyncDto getLdapSyncInfo() {
+    final LdapSyncDto syncInfo = new LdapSyncDto();
+
     final Map<String, Group> internalGroupsMap = getInternalGroups();
-    final Set<String> externalGroups = getExternalLdapGroupNames();
-    for (String externalGroup : externalGroups) {
-      if (internalGroupsMap.containsKey(externalGroup)
-          && internalGroupsMap.get(externalGroup).isLdapGroup()) {
-        ldapGroups.put(externalGroup, true);
+    final Set<LdapGroupDto> externalGroups = getExternalLdapGroupInfo();
+    for (LdapGroupDto externalGroup : externalGroups) {
+      if (internalGroupsMap.containsKey(externalGroup.getGroupName())
+          && internalGroupsMap.get(externalGroup.getGroupName()).isLdapGroup()) {
+        externalGroup.setSynced(true);
       } else {
-        ldapGroups.put(externalGroup, false);
+        externalGroup.setSynced(false);
       }
     }
 
-    return ldapGroups;
-  }
-
-  /**
-   * Retrieves a key-value map of all LDAP users.
-   *
-   * @return map of UserName-Synced pairs.
-   */
-  public Map<String, Boolean> getLdapUsersSyncInfo() {
-    final Map<String, Boolean> ldapUsers = new HashMap<String, Boolean>();
-    final List<User> internalUsers = users.getAllUsers();
-    final Map<String, User> internalUsersMap = new HashMap<String, User>();
-    for (User user : internalUsers) {
-      internalUsersMap.put(user.getUserName(), user);
-    }
-    final Set<String> externalUsers = getExternalLdapUserNames();
-    for (String externalUser : externalUsers) {
+    final Map<String, User> internalUsersMap = getInternalUsers();
+    final Set<LdapUserDto> externalUsers = getExternalLdapUserInfo();
+    for (LdapUserDto externalUser : externalUsers) {
-      if (internalUsersMap.containsKey(externalUser)
-          && internalUsersMap.get(externalUser).isLdapUser()) {
+      if (internalUsersMap.containsKey(externalUser.getUserName())
+          && internalUsersMap.get(externalUser.getUserName()).isLdapUser()) {
-        ldapUsers.put(externalUser, true);
+        externalUser.setSynced(true);
       } else {
-        ldapUsers.put(externalUser, false);
+        externalUser.setSynced(false);
       }
     }
 
-    return ldapUsers;
+    syncInfo.setGroups(externalGroups);
+    syncInfo.setUsers(externalUsers);
+    return syncInfo;
   }
 
   /**
@@ -150,93 +149,117 @@ public class AmbariLdapDataPopulator {
    * @param groups set of groups to synchronize
    * @throws AmbariException if synchronization failed for any reason
    */
-  public void synchronizeLdapUsersAndGroups(Set<String> users,
+  public LdapBatchDto synchronizeLdapUsersAndGroups(Set<String> users,
       Set<String> groups) throws AmbariException {
+    final LdapBatchDto batchInfo = new LdapBatchDto();
+
     // validate request
-    final Set<String> externalUsers = getExternalLdapUserNames();
+    final Set<LdapUserDto> externalUsers = getExternalLdapUserInfo();
+    final Map<String, LdapUserDto> externalUsersMap = new HashMap<String, LdapUserDto>();
+    for (LdapUserDto user: externalUsers) {
+      externalUsersMap.put(user.getUserName(), user);
+    }
     for (String user : users) {
-      if (!externalUsers.contains(user)) {
+      if (!externalUsersMap.containsKey(user)) {
         throw new AmbariException("Couldn't sync LDAP user " + user
             + ", it doesn't exist");
       }
     }
-    final Set<String> externalGroups = getExternalLdapGroupNames();
+    final Set<LdapGroupDto> externalGroups = getExternalLdapGroupInfo();
+    final Map<String, LdapGroupDto> externalGroupsMap = new HashMap<String, LdapGroupDto>();
+    for (LdapGroupDto group: externalGroups) {
+      externalGroupsMap.put(group.getGroupName(), group);
+    }
     for (String group : groups) {
-      if (!externalGroups.contains(group)) {
+      if (!externalGroupsMap.containsKey(group)) {
         throw new AmbariException("Couldn't sync LDAP group " + group
             + ", it doesn't exist");
       }
     }
 
-    // processing groups
     final Map<String, Group> internalGroupsMap = getInternalGroups();
+    final Map<String, User> internalUsersMap = getInternalUsers();
+
+    // processing groups
     for (String groupName : groups) {
       if (internalGroupsMap.containsKey(groupName)) {
         final Group group = internalGroupsMap.get(groupName);
         if (!group.isLdapGroup()) {
-          this.users.setGroupLdap(groupName);
+          batchInfo.getGroupsToBecomeLdap().add(groupName);
         }
       } else {
-        this.users.createGroup(groupName);
-        this.users.setGroupLdap(groupName);
+        batchInfo.getGroupsToBeCreated().add(groupName);
       }
-      refreshGroupMembers(groupName);
+      refreshGroupMembers(batchInfo, externalGroupsMap.get(groupName), internalUsersMap, externalUsers);
       internalGroupsMap.remove(groupName);
     }
     for (Entry<String, Group> internalGroup : internalGroupsMap.entrySet()) {
       if (internalGroup.getValue().isLdapGroup()) {
-        this.users.removeGroup(internalGroup.getValue());
+        batchInfo.getGroupsToBeRemoved().add(internalGroup.getValue().getGroupName());
       }
     }
 
-    cleanUpLdapUsersWithoutGroup();
-
     // processing users
-    final Map<String, User> internalUsersMap = getInternalUsers();
     for (String userName : users) {
       if (internalUsersMap.containsKey(userName)) {
         final User user = internalUsersMap.get(userName);
-        if (!user.isLdapUser()) {
-          this.users.setUserLdap(userName);
+        if (user != null && !user.isLdapUser()) {
+          batchInfo.getUsersToBecomeLdap().add(userName);
         }
       } else {
-        this.users.createUser(userName, "", true, false);
-        this.users.setUserLdap(userName);
+        batchInfo.getUsersToBeCreated().add(userName);
       }
     }
 
+    return batchInfo;
   }
 
   /**
    * Check group members of the synced group: add missing ones and remove the ones absent in external LDAP.
    *
-   * @param groupName group name
+   * @param batchInfo batch info to fill with membership updates
+   * @param group LDAP group to refresh
+   * @param internalUsers map of internal users
+   * @param externalUsers set of external users
    * @throws AmbariException if group refresh failed
    */
-  protected void refreshGroupMembers(String groupName) throws AmbariException {
-    final Set<String> externalMembers = getExternalLdapGroupMembers(groupName);
-    final Map<String, User> internalUsers = getInternalUsers();
-    final Map<String, User> internalMembers = getInternalMembers(groupName);
+  protected void refreshGroupMembers(LdapBatchDto batchInfo, LdapGroupDto group, Map<String, User> internalUsers, Set<LdapUserDto> externalUsers) throws AmbariException {
+    final Set<String> externalMembers = new HashSet<String>();
+    for (String memberAttribute: group.getMemberAttributes()) {
+      for (LdapUserDto externalUser: externalUsers) {
+        // memberAttribute may be either DN or UID, check both
+        if (externalUser.getDn().equals(memberAttribute) || externalUser.getUid().equals(memberAttribute)) {
+          externalMembers.add(externalUser.getUserName());
+          break;
+        }
+      }
+    }
+    final Map<String, User> internalMembers = getInternalMembers(group.getGroupName());
     for (String externalMember: externalMembers) {
       if (internalUsers.containsKey(externalMember)) {
         final User user = internalUsers.get(externalMember);
+        if (user == null) {
+          // user is fresh and is already added to batch info
+          if (!internalMembers.containsKey(externalMember)) {
+            batchInfo.getMembershipToAdd().add(new LdapUserGroupMemberDto(group.getGroupName(), externalMember));
+          }
+          continue;
+        }
         if (!user.isLdapUser()) {
-          users.setUserLdap(externalMember);
+          batchInfo.getUsersToBecomeLdap().add(externalMember);
         }
         if (!internalMembers.containsKey(externalMember)) {
-          users.addMemberToGroup(groupName, externalMember);
+          batchInfo.getMembershipToAdd().add(new LdapUserGroupMemberDto(group.getGroupName(), externalMember));
         }
         internalMembers.remove(externalMember);
-        internalUsers.remove(externalMember);
       } else {
-        users.createUser(externalMember, "");
-        users.setUserLdap(externalMember);
-        users.addMemberToGroup(groupName, externalMember);
+        batchInfo.getUsersToBeCreated().add(externalMember);
+        batchInfo.getMembershipToAdd().add(new LdapUserGroupMemberDto(group.getGroupName(), externalMember));
+        internalUsers.put(externalMember, null);
       }
     }
     for (Entry<String, User> userToBeUnsynced: internalMembers.entrySet()) {
       final User user = userToBeUnsynced.getValue();
-      users.removeMemberFromGroup(groupName, user.getUserName());
+      batchInfo.getMembershipToRemove().add(new LdapUserGroupMemberDto(group.getGroupName(), user.getUserName()));
     }
   }
 
@@ -259,20 +282,32 @@ public class AmbariLdapDataPopulator {
   /**
    * Retrieves groups from external LDAP server.
    *
-   * @return set of user names
+   * @return set of info about LDAP groups
    */
-  protected Set<String> getExternalLdapGroupNames() {
-    final Set<String> groups = new HashSet<String>();
+  protected Set<LdapGroupDto> getExternalLdapGroupInfo() {
+    final Set<LdapGroupDto> groups = new HashSet<LdapGroupDto>();
     final LdapTemplate ldapTemplate = loadLdapTemplate();
     final EqualsFilter equalsFilter = new EqualsFilter("objectClass",
         ldapServerProperties.getGroupObjectClass());
     String baseDn = ldapServerProperties.getBaseDN();
-    ldapTemplate.search(baseDn, equalsFilter.encode(), new AttributesMapper() {
+    ldapTemplate.search(baseDn, equalsFilter.encode(), new ContextMapper() {
+
+      @Override
+      public Object mapFromContext(Object ctx) {
+        final DirContextAdapter adapter = (DirContextAdapter) ctx;
 
-      public Object mapFromAttributes(Attributes attributes)
-          throws NamingException {
-        groups.add(attributes.get(ldapServerProperties.getGroupNamingAttr())
-            .get().toString().toLowerCase());
+        final LdapGroupDto group = new LdapGroupDto();
+        final String groupNameAttribute = adapter.getStringAttribute(ldapServerProperties.getGroupNamingAttr());
+        group.setGroupName(groupNameAttribute.toLowerCase());
+
+        final String[] uniqueMembers = adapter.getStringAttributes(ldapServerProperties.getGroupMembershipAttr());
+        if (uniqueMembers != null) {
+          for (String uniqueMember: uniqueMembers) {
+            group.getMemberAttributes().add(uniqueMember.toLowerCase());
+          }
+        }
+
+        groups.add(group);
         return null;
       }
     });
@@ -282,51 +317,35 @@ public class AmbariLdapDataPopulator {
   /**
    * Retrieves users from external LDAP server.
    *
-   * @return set of user names
+   * @return set of info about LDAP users
    */
-  protected Set<String> getExternalLdapUserNames() {
-    final Set<String> users = new HashSet<String>();
+  protected Set<LdapUserDto> getExternalLdapUserInfo() {
+    final Set<LdapUserDto> users = new HashSet<LdapUserDto>();
     final LdapTemplate ldapTemplate = loadLdapTemplate();
     final EqualsFilter equalsFilter = new EqualsFilter("objectClass",
         ldapServerProperties.getUserObjectClass());
     String baseDn = ldapServerProperties.getBaseDN();
-    ldapTemplate.search(baseDn, equalsFilter.encode(), new AttributesMapper() {
-
-      public Object mapFromAttributes(Attributes attributes)
-          throws NamingException {
-        users.add(attributes.get(ldapServerProperties.getUsernameAttribute())
-            .get().toString().toLowerCase());
-        return null;
-      }
-    });
-    return users;
-  }
-
-  /**
-   * Retrieves members of the specified group from external LDAP server.
-   *
-   * @param groupName group name
-   * @return set of group names
-   */
-  protected Set<String> getExternalLdapGroupMembers(String groupName) {
-    final Set<String> members = new HashSet<String>();
-    final LdapTemplate ldapTemplate = loadLdapTemplate();
-    final AndFilter andFilter = new AndFilter();
-    andFilter.and(new EqualsFilter("objectClass", ldapServerProperties.getGroupObjectClass()));
-    andFilter.and(new EqualsFilter(ldapServerProperties.getGroupNamingAttr(), groupName));
-    String baseDn = ldapServerProperties.getBaseDN();
-    ldapTemplate.search(baseDn, andFilter.encode(), new ContextMapper() {
+    ldapTemplate.search(baseDn, equalsFilter.encode(), new ContextMapper() {
 
+      @Override
       public Object mapFromContext(Object ctx) {
+        final LdapUserDto user = new LdapUserDto();
         final DirContextAdapter adapter  = (DirContextAdapter) ctx;
-        for (String uniqueMember: adapter.getStringAttributes(ldapServerProperties.getGroupMembershipAttr())) {
-          final DirContextAdapter userAdapter = (DirContextAdapter) ldapTemplate.lookup(uniqueMember);
-          members.add(userAdapter.getStringAttribute(ldapServerProperties.getUsernameAttribute()).toLowerCase());
+        final String usernameAttribute = adapter.getStringAttribute(ldapServerProperties.getUsernameAttribute());
+        final String uidAttribute = adapter.getStringAttribute("uid");
+        if (usernameAttribute != null && uidAttribute != null) {
+          user.setUserName(usernameAttribute.toLowerCase());
+          user.setUid(uidAttribute.toLowerCase());
+          user.setDn(adapter.getNameInNamespace().toLowerCase());
+          users.add(user);
+        } else {
+          LOG.warn("Ignoring LDAP user " + adapter.getNameInNamespace() + " as it doesn't have required" +
+              " attributes uid and " + ldapServerProperties.getUsernameAttribute());
         }
         return null;
       }
     });
-    return members;
+    return users;
   }
 
   /**
@@ -365,6 +384,9 @@ public class AmbariLdapDataPopulator {
    */
   protected Map<String, User> getInternalMembers(String groupName) {
     final Collection<User> internalMembers = users.getGroupMembers(groupName);
+    if (internalMembers == null) {
+      return Collections.emptyMap();
+    }
     final Map<String, User> internalMembersMap = new HashMap<String, User>();
     for (User user : internalMembers) {
       internalMembersMap.put(user.getUserName(), user);
@@ -373,7 +395,7 @@ public class AmbariLdapDataPopulator {
   }
 
   /**
-   * Checks LDAP configuration for changes and reloads LDAP template if they occured.
+   * Checks LDAP configuration for changes and reloads LDAP template if they occurred.
    *
    * @return LdapTemplate instance
    */
@@ -390,8 +412,7 @@ public class AmbariLdapDataPopulator {
 
       if (!ldapServerProperties.isAnonymousBind()) {
         ldapContextSource.setUserDn(ldapServerProperties.getManagerDn());
-        ldapContextSource
-            .setPassword(ldapServerProperties.getManagerPassword());
+        ldapContextSource.setPassword(ldapServerProperties.getManagerPassword());
       }
 
       try {

+ 67 - 0
ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapBatchDto.java

@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.security.ldap;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Contains information for batch database update on LDAP synchronization.
+ */
+public class LdapBatchDto {
+  private final Set<String> groupsToBecomeLdap = new HashSet<String>();
+  private final Set<String> groupsToBeCreated = new HashSet<String>();
+  private final Set<String> groupsToBeRemoved = new HashSet<String>();
+  private final Set<String> usersToBecomeLdap = new HashSet<String>();
+  private final Set<String> usersToBeCreated = new HashSet<String>();
+  private final Set<String> usersToBeRemoved = new HashSet<String>();
+  private final Set<LdapUserGroupMemberDto> membershipToAdd = new HashSet<LdapUserGroupMemberDto>();
+  private final Set<LdapUserGroupMemberDto> membershipToRemove = new HashSet<LdapUserGroupMemberDto>();
+
+  public Set<String> getGroupsToBecomeLdap() {
+    return groupsToBecomeLdap;
+  }
+
+  public Set<String> getGroupsToBeCreated() {
+    return groupsToBeCreated;
+  }
+
+  public Set<String> getUsersToBecomeLdap() {
+    return usersToBecomeLdap;
+  }
+
+  public Set<String> getUsersToBeCreated() {
+    return usersToBeCreated;
+  }
+
+  public Set<LdapUserGroupMemberDto> getMembershipToAdd() {
+    return membershipToAdd;
+  }
+
+  public Set<LdapUserGroupMemberDto> getMembershipToRemove() {
+    return membershipToRemove;
+  }
+
+  public Set<String> getGroupsToBeRemoved() {
+    return groupsToBeRemoved;
+  }
+
+  public Set<String> getUsersToBeRemoved() {
+    return usersToBeRemoved;
+  }
+}

+ 113 - 0
ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapGroupDto.java

@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.security.ldap;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Pojo with information about LDAP group of users.
+ */
+public class LdapGroupDto {
+  /**
+   * Name of the group.
+   */
+  private String groupName;
+
+  /**
+   * Set of member attributes. Usually it's either UID or DN of users.
+   */
+  private Set<String> memberAttributes = new HashSet<String>();
+
+  /**
+   * Determines if the LDAP group is synchronized with internal group in database.
+   */
+  private boolean synced;
+
+  /**
+   * Get the group name.
+   *
+   * @return the group name
+   */
+  public String getGroupName() {
+    return groupName;
+  }
+
+  /**
+   * Set the group name.
+   *
+   * @param groupName the group name
+   */
+  public void setGroupName(String groupName) {
+    this.groupName = groupName;
+  }
+
+  /**
+   * Get the member attributes.
+   *
+   * @return the set of member attributes
+   */
+  public Set<String> getMemberAttributes() {
+    return memberAttributes;
+  }
+
+  /**
+   * Set the member attributes.
+   *
+   * @param memberAttributes the member attributes
+   */
+  public void setMemberAttributes(Set<String> memberAttributes) {
+    this.memberAttributes = memberAttributes;
+  }
+
+  /**
+   * Get the synced flag.
+   *
+   * @return the synced flag
+   */
+  public boolean isSynced() {
+    return synced;
+  }
+
+  /**
+   * Set the synced flag
+   *
+   * @param synced the synced flag
+   */
+  public void setSynced(boolean synced) {
+    this.synced = synced;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = groupName != null ? groupName.hashCode() : 0;
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    LdapGroupDto that = (LdapGroupDto) o;
+
+    if (groupName != null ? !groupName.equals(that.getGroupName()) : that.getGroupName() != null) return false;
+
+    return true;
+  }
+}

+ 72 - 0
ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapSyncDto.java

@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.security.ldap;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Pojo with information about LDAP groups and users.
+ */
+public class LdapSyncDto {
+  /**
+   * LDAP groups.
+   */
+  private Set<LdapGroupDto> groups = new HashSet<LdapGroupDto>();
+
+  /**
+   * LDAP users.
+   */
+  private Set<LdapUserDto> users = new HashSet<LdapUserDto>();
+
+  /**
+   * Get the LDAP groups.
+   *
+   * @return the LDAP groups
+   */
+  public Set<LdapGroupDto> getGroups() {
+    return groups;
+  }
+
+  /**
+   * Set the LDAP groups.
+   *
+   * @param groups the LDAP groups
+   */
+  public void setGroups(Set<LdapGroupDto> groups) {
+    this.groups = groups;
+  }
+
+  /**
+   * Get the LDAP users.
+   *
+   * @return the LDAP users
+   */
+  public Set<LdapUserDto> getUsers() {
+    return users;
+  }
+
+  /**
+   * Set the LDAP users.
+   *
+   * @param users the LDAP users
+   */
+  public void setUsers(Set<LdapUserDto> users) {
+    this.users = users;
+  }
+}

+ 133 - 0
ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapUserDto.java

@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.security.ldap;
+
+/**
+ * Pojo with information about LDAP user.
+ */
+public class LdapUserDto {
+  /**
+   * Name of the user. Should be always unique.
+   */
+  private String userName;
+
+  /**
+   * Determines if the LDAP user is synchronized with internal user in database.
+   */
+  private boolean synced;
+
+  /**
+   * Unique identifier from LDAP.
+   */
+  private String uid;
+
+  /**
+   * Distinguished name from LDAP.
+   */
+  private String dn;
+
+  /**
+   * Get the user name.
+   *
+   * @return the user name
+   */
+  public String getUserName() {
+    return userName;
+  }
+
+  /**
+   * Set the user name.
+   *
+   * @param userName the user name
+   */
+  public void setUserName(String userName) {
+    this.userName = userName;
+  }
+
+  /**
+   * Get the synced flag.
+   *
+   * @return the synced flag
+   */
+  public boolean isSynced() {
+    return synced;
+  }
+
+  /**
+   * Set the synced flag
+   *
+   * @param synced the synced flag
+   */
+  public void setSynced(boolean synced) {
+    this.synced = synced;
+  }
+
+  /**
+   * Get the UID.
+   *
+   * @return the UID
+   */
+  public String getUid() {
+    return uid;
+  }
+
+  /**
+   * Set the UID.
+   *
+   * @param uid the UID
+   */
+  public void setUid(String uid) {
+    this.uid = uid;
+  }
+
+  /**
+   * Get the DN.
+   *
+   * @return the DN
+   */
+  public String getDn() {
+    return dn;
+  }
+
+  /**
+   * Set the DN.
+   *
+   * @param dn the DN
+   */
+  public void setDn(String dn) {
+    this.dn = dn;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = userName != null ? userName.hashCode() : 0;
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    LdapUserDto that = (LdapUserDto) o;
+
+    if (userName != null ? !userName.equals(that.getUserName()) : that.getUserName() != null) return false;
+
+    return true;
+  }
+}

+ 82 - 0
ambari-server/src/main/java/org/apache/ambari/server/security/ldap/LdapUserGroupMemberDto.java

@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.security.ldap;
+
+/**
+ * Pojo with information about LDAP membership.
+ */
+public class LdapUserGroupMemberDto {
+  /**
+   * Name of the group.
+   */
+  private final String groupName;
+
+  /**
+   * Name of the user.
+   */
+  private final String userName;
+
+  /**
+   * Constructor.
+   *
+   * @param groupName group name
+   * @param userName user name
+   */
+  public LdapUserGroupMemberDto(String groupName, String userName) {
+    this.groupName = groupName;
+    this.userName = userName;
+  }
+
+  /**
+   * Get the group name.
+   *
+   * @return the group name
+   */
+  public String getGroupName() {
+    return groupName;
+  }
+
+  /**
+   * Get the user name.
+   *
+   * @return the user name
+   */
+  public String getUserName() {
+    return userName;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    LdapUserGroupMemberDto that = (LdapUserGroupMemberDto) o;
+
+    if (userName != null ? !userName.equals(that.userName) : that.userName != null) return false;
+    if (groupName != null ? !groupName.equals(that.groupName) : that.groupName != null) return false;
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = userName != null ? userName.hashCode() : 0;
+    result = 31 * result + (groupName != null ? groupName.hashCode() : 0);
+    return result;
+  }
+}

+ 2 - 1
ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java

@@ -423,6 +423,8 @@ public class ConfigHelper {
 
     for(Service service : clusters.getCluster(clusterName).getServices().values()) {
       Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), service.getName());
+      Set<PropertyInfo> stackLevelProperties = ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
+      stackProperties.addAll(stackLevelProperties);
       
       for (PropertyInfo stackProperty : stackProperties) {
         if(stackProperty.getName().equals(propertyName)) {
@@ -432,7 +434,6 @@ public class ConfigHelper {
           result.add(configType);
         }
       }
-      
     }
     
     return result;

+ 4 - 0
ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java

@@ -567,6 +567,10 @@ public class ViewRegistry {
           LOG.debug("Deleting view instance " + viewName + "/" +
               version + "/" +instanceName);
         }
+        List<PrivilegeEntity> instancePrivileges = privilegeDAO.findByResourceId(instanceEntity.getResource().getId());
+        for (PrivilegeEntity privilegeEntity : instancePrivileges) {
+          privilegeDAO.remove(privilegeEntity);
+        }
         instanceDAO.remove(instanceEntity);
         viewEntity.removeInstanceDefinition(instanceName);
         removeInstanceDefinition(viewEntity, instanceName);

+ 3 - 3
ambari-server/src/main/python/ambari-server.py

@@ -2940,11 +2940,11 @@ def setup_ldap():
   LDAP_PRIMARY_URL_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[0])
   LDAP_SECONDARY_URL_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[1])
   LDAP_USE_SSL_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[2], "false")
-  LDAP_USER_CLASS_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[3], "person")
+  LDAP_USER_CLASS_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[3], "posixAccount")
   LDAP_USER_ATT_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[4], "uid")
-  LDAP_GROUP_CLASS_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[5], "groupOfUniqueNames")
+  LDAP_GROUP_CLASS_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[5], "posixGroup")
   LDAP_GROUP_ATT_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[6], "cn")
-  LDAP_GROUP_MEMBER_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[7], "uniqueMember")
+  LDAP_GROUP_MEMBER_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[7], "memberUid")
   LDAP_BASE_DN_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[8])
   LDAP_BIND_DEFAULT = get_value_from_properties(properties, ldap_property_list_reqd[9], "false")
   LDAP_MGR_DN_DEFAULT = get_value_from_properties(properties, ldap_property_list_opt[0])

+ 4 - 0
ambari-server/src/main/resources/META-INF/persistence.xml

@@ -75,7 +75,11 @@
       <!--<property name="javax.persistence.jdbc.driver" value="org.postgresql.Driver" />-->
       <property name="eclipselink.cache.size.default" value="10000" />
       <property name="eclipselink.jdbc.batch-writing" value="JDBC"/>
+      <property name="eclipselink.jdbc.batch-writing.size" value="4000"/>
+      <property name="eclipselink.jdbc.sequence-connection-pool" value="true" />
       <property name="eclipselink.weaving" value="static" />
+      
+      <!--<property name="eclipselink.logging.level.sql" value="FINEST" />-->
       <!--<property name="eclipselink.id-validation" value="NULL" />-->
 
     </properties>

+ 6 - 2
ambari-server/src/main/resources/scripts/stack_advisor.py

@@ -145,8 +145,12 @@ def instantiateStackAdvisor(stackName, stackVersion, parentVersions):
 if __name__ == '__main__':
   try:
     main(sys.argv)
-  except Exception, e:
+  except StackAdvisorException as stack_exception:
     traceback.print_exc()
-    print "Error occured in stack advisor.\nError details: {0}".format(str(e))
+    print "Error occured in stack advisor.\nError details: {0}".format(str(stack_exception))
     sys.exit(1)
+  except Exception as e:
+    traceback.print_exc()
+    print "Error occured in stack advisor.\nError details: {0}".format(str(e))
+    sys.exit(2)
 

+ 4 - 19
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py

@@ -134,27 +134,12 @@ mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
 dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
 
 #log4j.properties
-rca_properties = format('''
-ambari.jobhistory.database={ambari_db_rca_url}
-ambari.jobhistory.driver={ambari_db_rca_driver}
-ambari.jobhistory.user={ambari_db_rca_username}
-ambari.jobhistory.password={ambari_db_rca_password}
-ambari.jobhistory.logger=${{hadoop.root.logger}}
+if 'mapred-env' in config['configurations'] and 'rca_properties' in config['configurations']['mapred-env']:
+  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])
 
-log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender
-log4j.appender.JHA.database={ambari_db_rca_url}
-log4j.appender.JHA.driver={ambari_db_rca_driver}
-log4j.appender.JHA.user={ambari_db_rca_username}
-log4j.appender.JHA.password={ambari_db_rca_password}
-
-log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA
-log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
-
-''')
-
-if (('hdfs-log4j' in config['configurations']) and ('content' in config['configurations']['hdfs-log4j'])):
+if 'hdfs-log4j' in config['configurations']:
   log4j_props = config['configurations']['hdfs-log4j']['content']
-  if (('mapreduce-log4j' in config['configurations']) and ('content' in config['configurations']['mapreduce-log4j'])):
+  if 'mapreduce-log4j' in config['configurations']:
     log4j_props += config['configurations']['mapreduce-log4j']['content']
     if rca_enabled:
       log4j_props += rca_properties

+ 6 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py

@@ -47,6 +47,12 @@ class HdfsClient(Script):
     import params
     hdfs()
 
+  def generate_configs_get_template_file_content(self, filename, dicts):
+    import params
+    content = super(HdfsClient,self).generate_configs_get_template_file_content(filename, dicts)
+    if filename == 'log4j.properties':
+      content += params.rca_properties
+    return content
 
 if __name__ == "__main__":
   HdfsClient().execute()

+ 15 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py

@@ -173,4 +173,18 @@ ttnode_heapsize = default("/configurations/mapred-env/ttnode_heapsize","1024m")
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 
 mapred_pid_dir_prefix = default("/configurations/hadoop-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+
+rca_enabled = False
+if 'mapred-env' in config['configurations']:
+  rca_enabled =  config['configurations']['mapred-env']['rca_enabled']
+
+ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url']
+ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver']
+ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username']
+ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password']
+
+rca_properties = ''
+if rca_enabled and 'mapreduce-log4j' in config['configurations'] \
+  and 'rca_properties' in config['configurations']['mapred-env']:
+  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])

+ 20 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/configuration/mapred-env.xml

@@ -57,5 +57,25 @@
     <property-type>USER</property-type>
     <description>MapReduce User.</description>
   </property>
+  <property>
+    <name>rca_properties</name>
+    <value>
+ambari.jobhistory.database={ambari_db_rca_url}
+ambari.jobhistory.driver={ambari_db_rca_driver}
+ambari.jobhistory.user={ambari_db_rca_username}
+ambari.jobhistory.password={ambari_db_rca_password}
+ambari.jobhistory.logger=${{hadoop.root.logger}}
+
+log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender
+log4j.appender.JHA.database={ambari_db_rca_url}
+log4j.appender.JHA.driver={ambari_db_rca_driver}
+log4j.appender.JHA.user={ambari_db_rca_username}
+log4j.appender.JHA.password={ambari_db_rca_password}
+
+log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA
+log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
+
+    </value>
+  </property>
 
 </configuration>

+ 7 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/client.py

@@ -38,5 +38,12 @@ class Client(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
+  def generate_configs_get_template_file_content(self, filename, dicts):
+    import params
+    content = super(Client,self).generate_configs_get_template_file_content(filename, dicts)
+    if filename == 'log4j.properties':
+     content += params.rca_properties
+    return content
+
 if __name__ == "__main__":
   Client().execute()

+ 13 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py

@@ -78,3 +78,16 @@ HdfsDirectory = functools.partial(
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 slave_hosts = default("/clusterHostInfo/slave_hosts", [])
+
+rca_enabled = False
+if 'mapred-env' in config['configurations']:
+  rca_enabled =  config['configurations']['mapred-env']['rca_enabled']
+
+ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url']
+ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver']
+ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username']
+ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password']
+
+rca_properties = ''
+if rca_enabled and 'rca_properties' in config['configurations']['mapred-env']:
+  rca_properties = format(config['configurations']['mapred-env']['rca_properties'])

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml

@@ -29,7 +29,7 @@
       <components>
         <component>
           <name>SQOOP</name>
-          <displayName>Pig</displayName>
+          <displayName>Sqoop</displayName>
           <category>CLIENT</category>
           <cardinality>1+</cardinality>
           <dependencies>

+ 11 - 8
ambari-server/src/main/resources/stacks/HDP/1.3.2/services/stack_advisor.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 import re
 import sys
+from math import ceil
 
 from stack_advisor import DefaultStackAdvisor
 
@@ -146,19 +147,21 @@ class HDP132StackAdvisor(DefaultStackAdvisor):
       24 < cluster["ram"]: 2048
     }[1]
 
+    totalAvailableRam = cluster["ram"] - cluster["reservedRam"]
+    if cluster["hBaseInstalled"]:
+      totalAvailableRam -= cluster["hbaseRam"]
+    cluster["totalAvailableRam"] = max(2048, totalAvailableRam * 1024)
     '''containers = max(3, min (2*cores,min (1.8*DISKS,(Total available RAM) / MIN_CONTAINER_SIZE))))'''
-    cluster["containers"] = max(3,
-                                min(2 * cluster["cpu"],
-                                    int(min(1.8 * cluster["disk"],
-                                            cluster["ram"] / cluster["minContainerSize"]))))
+    cluster["containers"] = round(max(3,
+                                      min(2 * cluster["cpu"],
+                                          min(ceil(1.8 * cluster["disk"]),
+                                              cluster["totalAvailableRam"] / cluster["minContainerSize"]))))
 
     '''ramPerContainers = max(2GB, RAM - reservedRam - hBaseRam) / containers'''
-    cluster["ramPerContainer"] = max(2048,
-                                     cluster["ram"] - cluster["reservedRam"] - cluster["hbaseRam"])
-    cluster["ramPerContainer"] /= cluster["containers"]
+    cluster["ramPerContainer"] = abs(cluster["totalAvailableRam"] / cluster["containers"])
     '''If greater than 1GB, value will be in multiples of 512.'''
     if cluster["ramPerContainer"] > 1024:
-      cluster["ramPerContainer"] = ceil(cluster["ramPerContainer"] / 512) * 512
+      cluster["ramPerContainer"] = int(cluster["ramPerContainer"] / 512) * 512
 
     cluster["mapMemory"] = int(cluster["ramPerContainer"])
     cluster["reduceMemory"] = cluster["ramPerContainer"]

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat.py

@@ -36,7 +36,7 @@ def hcat():
   )
 
   XmlConfig("hive-site.xml",
-            conf_dir=params.hive_conf_dir,
+            conf_dir=params.hive_client_conf_dir,
             configurations=params.config['configurations']['hive-site'],
             configuration_attributes=params.config['configuration_attributes']['hive-site'],
             owner=params.hive_user,

+ 50 - 68
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py

@@ -39,52 +39,15 @@ def hive(name=None):
                          mode=params.hive_hdfs_user_mode
     )
     params.HdfsDirectory(None, action="create")
+  
+  # We should change configurations for client as well as for server.
+  # The reason is that stale-configs are service-level, not component.
+  for conf_dir in params.hive_conf_dirs_list:
+    fill_conf_dir(conf_dir)
+    
   if name == 'metastore' or name == 'hiveserver2':
     jdbc_connector()
-
-  Directory(params.hive_conf_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            recursive=True
-  )
-  Directory(params.hive_server_conf_dir,
-            owner=params.hive_user,
-            group=params.user_group,
-            recursive=True
-  )
-
-  XmlConfig("mapred-site.xml",
-            conf_dir=params.hive_conf_dir,
-            configurations=params.config['configurations']['mapred-site'],
-            configuration_attributes=params.config['configuration_attributes']['mapred-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0644)
-
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_conf_dir,
-            configurations=params.config['configurations']['hive-site'],
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0644)
-
-  XmlConfig("mapred-site.xml",
-            conf_dir=params.hive_server_conf_dir,
-            configurations=params.config['configurations']['mapred-site'],
-            configuration_attributes=params.config['configuration_attributes']['mapred-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0600)
-
-  XmlConfig("hive-site.xml",
-            conf_dir=params.hive_server_conf_dir,
-            configurations=params.config['configurations']['hive-site'],
-            configuration_attributes=params.config['configuration_attributes']['hive-site'],
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0600)
-
+    
   environment = {
     "no_proxy": format("{ambari_server_hostname}")
   }
@@ -98,31 +61,19 @@ def hive(name=None):
           not_if=format("[ -f {check_db_connection_jar_name}]"),
           environment = environment)
 
-  File(format("{hive_conf_dir}/hive-env.sh"),
-       owner=params.hive_user,
-       group=params.user_group,
-       content=InlineTemplate(params.hive_env_sh_template)
-  )
-
-  File(format("{hive_server_conf_dir}/hive-env.sh"),
-       owner=params.hive_user,
-       group=params.user_group,
-       content=InlineTemplate(params.hive_env_sh_template)
-  )
-
   if name == 'metastore':
     File(params.start_metastore_path,
          mode=0755,
          content=StaticFile('startMetastore.sh')
     )
     if params.init_metastore_schema:
-      create_schema_cmd = format("export HIVE_CONF_DIR={hive_config_dir} ; "
+      create_schema_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
                                  "{hive_bin}/schematool -initSchema "
                                  "-dbType {hive_metastore_db_type} "
                                  "-userName {hive_metastore_user_name} "
                                  "-passWord {hive_metastore_user_passwd!p}")
 
-      check_schema_created_cmd = format("export HIVE_CONF_DIR={hive_config_dir} ; "
+      check_schema_created_cmd = format("export HIVE_CONF_DIR={hive_server_conf_dir} ; "
                                         "{hive_bin}/schematool -info "
                                         "-dbType {hive_metastore_db_type} "
                                         "-userName {hive_metastore_user_name} "
@@ -141,40 +92,71 @@ def hive(name=None):
     crt_directory(params.hive_pid_dir)
     crt_directory(params.hive_log_dir)
     crt_directory(params.hive_var_lib)
+    
+def fill_conf_dir(component_conf_dir):
+  import params
+  
+  Directory(component_conf_dir,
+            owner=params.hive_user,
+            group=params.user_group,
+            recursive=True
+  )
 
-  crt_file(format("{hive_conf_dir}/hive-default.xml.template"))
-  crt_file(format("{hive_conf_dir}/hive-env.sh.template"))
+  XmlConfig("mapred-site.xml",
+            conf_dir=component_conf_dir,
+            configurations=params.config['configurations']['mapred-site'],
+            configuration_attributes=params.config['configuration_attributes']['mapred-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0644)
+
+  XmlConfig("hive-site.xml",
+            conf_dir=component_conf_dir,
+            configurations=params.config['configurations']['hive-site'],
+            configuration_attributes=params.config['configuration_attributes']['hive-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0644)
+  
+  File(format("{component_conf_dir}/hive-env.sh"),
+       owner=params.hive_user,
+       group=params.user_group,
+       content=InlineTemplate(params.hive_env_sh_template)
+  )
+  
+  crt_file(format("{component_conf_dir}/hive-default.xml.template"))
+  crt_file(format("{component_conf_dir}/hive-env.sh.template"))
 
   log4j_exec_filename = 'hive-exec-log4j.properties'
   if (params.log4j_exec_props != None):
-    File(format("{params.hive_conf_dir}/{log4j_exec_filename}"),
+    File(format("{component_conf_dir}/{log4j_exec_filename}"),
          mode=0644,
          group=params.user_group,
          owner=params.hive_user,
          content=params.log4j_exec_props
     )
-  elif (os.path.exists("{params.hive_conf_dir}/{log4j_exec_filename}.template")):
-    File(format("{params.hive_conf_dir}/{log4j_exec_filename}"),
+  elif (os.path.exists("{component_conf_dir}/{log4j_exec_filename}.template")):
+    File(format("{component_conf_dir}/{log4j_exec_filename}"),
          mode=0644,
          group=params.user_group,
          owner=params.hive_user,
-         content=StaticFile(format("{params.hive_conf_dir}/{log4j_exec_filename}.template"))
+         content=StaticFile(format("{component_conf_dir}/{log4j_exec_filename}.template"))
     )
 
   log4j_filename = 'hive-log4j.properties'
   if (params.log4j_props != None):
-    File(format("{params.hive_conf_dir}/{log4j_filename}"),
+    File(format("{component_conf_dir}/{log4j_filename}"),
          mode=0644,
          group=params.user_group,
          owner=params.hive_user,
          content=params.log4j_props
     )
-  elif (os.path.exists("{params.hive_conf_dir}/{log4j_filename}.template")):
-    File(format("{params.hive_conf_dir}/{log4j_filename}"),
+  elif (os.path.exists("{component_conf_dir}/{log4j_filename}.template")):
+    File(format("{component_conf_dir}/{log4j_filename}"),
          mode=0644,
          group=params.user_group,
          owner=params.hive_user,
-         content=StaticFile(format("{params.hive_conf_dir}/{log4j_filename}.template"))
+         content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
     )
 
 

+ 6 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -27,7 +27,6 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
-hive_server_conf_dir = "/etc/hive/conf.server"
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
@@ -73,15 +72,19 @@ kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/
 hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
 
 #hive_env
-hive_conf_dir = "/etc/hive/conf"
 hive_dbroot = config['configurations']['hive-env']['hive_dbroot']
 hive_log_dir = config['configurations']['hive-env']['hive_log_dir']
 hive_pid_dir = status_params.hive_pid_dir
 hive_pid = status_params.hive_pid
 #Default conf dir for client
-hive_config_dir = hive_conf_dir
+hive_client_conf_dir = "/etc/hive/conf"
+hive_server_conf_dir = "/etc/hive/conf.server"
+hive_conf_dirs_list = [hive_server_conf_dir, hive_client_conf_dir]
+
 if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
   hive_config_dir = hive_server_conf_dir
+else:
+  hive_config_dir = hive_client_conf_dir
 
 #hive-site
 hive_database_name = config['configurations']['hive-env']['hive_database_name']

+ 20 - 17
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py

@@ -19,6 +19,7 @@ limitations under the License.
 
 import re
 import sys
+from math import ceil
 
 from stack_advisor import DefaultStackAdvisor
 
@@ -91,19 +92,19 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
   def recommendYARNConfigurations(self, configurations, clusterData):
     putYarnProperty = self.putProperty(configurations, "yarn-site")
-    putYarnProperty('yarn.nodemanager.resource.memory-mb', clusterData['containers'] * clusterData['ramPerContainer'])
-    putYarnProperty('yarn.scheduler.minimum-allocation-mb', clusterData['ramPerContainer'])
-    putYarnProperty('yarn.scheduler.maximum-allocation-mb', clusterData['containers'] * clusterData['ramPerContainer'])
+    putYarnProperty('yarn.nodemanager.resource.memory-mb', int(round(clusterData['containers'] * clusterData['ramPerContainer'])))
+    putYarnProperty('yarn.scheduler.minimum-allocation-mb', int(clusterData['ramPerContainer']))
+    putYarnProperty('yarn.scheduler.maximum-allocation-mb', int(round(clusterData['containers'] * clusterData['ramPerContainer'])))
 
   def recommendMapReduce2Configurations(self, configurations, clusterData):
     putMapredProperty = self.putProperty(configurations, "mapred-site")
-    putMapredProperty('yarn.app.mapreduce.am.resource.mb', clusterData['amMemory'])
-    putMapredProperty('yarn.app.mapreduce.am.command-opts', "-Xmx" + str(int(0.8 * clusterData['amMemory'])) + "m")
+    putMapredProperty('yarn.app.mapreduce.am.resource.mb', int(clusterData['amMemory']))
+    putMapredProperty('yarn.app.mapreduce.am.command-opts', "-Xmx" + str(int(round(0.8 * clusterData['amMemory']))) + "m")
     putMapredProperty('mapreduce.map.memory.mb', clusterData['mapMemory'])
-    putMapredProperty('mapreduce.reduce.memory.mb', clusterData['reduceMemory'])
-    putMapredProperty('mapreduce.map.java.opts', "-Xmx" + str(int(0.8 * clusterData['mapMemory'])) + "m")
-    putMapredProperty('mapreduce.reduce.java.opts', "-Xmx" + str(int(0.8 * clusterData['reduceMemory'])) + "m")
-    putMapredProperty('mapreduce.task.io.sort.mb', int(min(0.4 * clusterData['mapMemory'], 1024)))
+    putMapredProperty('mapreduce.reduce.memory.mb', int(clusterData['reduceMemory']))
+    putMapredProperty('mapreduce.map.java.opts', "-Xmx" + str(int(round(0.8 * clusterData['mapMemory']))) + "m")
+    putMapredProperty('mapreduce.reduce.java.opts', "-Xmx" + str(int(round(0.8 * clusterData['reduceMemory']))) + "m")
+    putMapredProperty('mapreduce.task.io.sort.mb', min(int(round(0.4 * clusterData['mapMemory'])), 1024))
 
   def getClusterData(self, servicesList, hosts, components):
 
@@ -161,19 +162,21 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       24 < cluster["ram"]: 2048
     }[1]
 
+    totalAvailableRam = cluster["ram"] - cluster["reservedRam"]
+    if cluster["hBaseInstalled"]:
+      totalAvailableRam -= cluster["hbaseRam"]
+    cluster["totalAvailableRam"] = max(2048, totalAvailableRam * 1024)
     '''containers = max(3, min (2*cores,min (1.8*DISKS,(Total available RAM) / MIN_CONTAINER_SIZE))))'''
-    cluster["containers"] = max(3,
+    cluster["containers"] = round(max(3,
                                 min(2 * cluster["cpu"],
-                                    int(min(1.8 * cluster["disk"],
-                                            cluster["ram"] / cluster["minContainerSize"]))))
+                                    min(ceil(1.8 * cluster["disk"]),
+                                            cluster["totalAvailableRam"] / cluster["minContainerSize"]))))
 
     '''ramPerContainers = max(2GB, RAM - reservedRam - hBaseRam) / containers'''
-    cluster["ramPerContainer"] = max(2048,
-                                     cluster["ram"] - cluster["reservedRam"] - cluster["hbaseRam"])
-    cluster["ramPerContainer"] /= cluster["containers"]
+    cluster["ramPerContainer"] = abs(cluster["totalAvailableRam"] / cluster["containers"])
     '''If greater than 1GB, value will be in multiples of 512.'''
     if cluster["ramPerContainer"] > 1024:
-      cluster["ramPerContainer"] = ceil(cluster["ramPerContainer"] / 512) * 512
+      cluster["ramPerContainer"] = int(cluster["ramPerContainer"] / 512) * 512
 
     cluster["mapMemory"] = int(cluster["ramPerContainer"])
     cluster["reduceMemory"] = cluster["ramPerContainer"]
@@ -345,4 +348,4 @@ def formatXmxSizeToBytes(value):
     modifier == 't': 1024 * 1024 * 1024 * 1024,
     modifier == 'p': 1024 * 1024 * 1024 * 1024 * 1024
     }[1]
-  return to_number(value) * m
+  return to_number(value) * m

+ 4 - 4
ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py

@@ -39,17 +39,17 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
                         "org.apache.oozie.service.HCatAccessorService")
 
   def recommendHiveConfigurations(self, configurations, clusterData):
-    containerSize = clusterData['mapMemory'] if clusterData['mapMemory'] > 2048 else clusterData['reduceMemory']
+    containerSize = clusterData['mapMemory'] if clusterData['mapMemory'] > 2048 else int(clusterData['reduceMemory'])
     containerSize = min(clusterData['containers'] * clusterData['ramPerContainer'], containerSize)
     putHiveProperty = self.putProperty(configurations, "hive-site")
-    putHiveProperty('hive.auto.convert.join.noconditionaltask.size', int(containerSize / 3) * 1048576)
-    putHiveProperty('hive.tez.java.opts', "-server -Xmx" + str(int(0.8 * containerSize))
+    putHiveProperty('hive.auto.convert.join.noconditionaltask.size', int(round(containerSize / 3)) * 1048576)
+    putHiveProperty('hive.tez.java.opts', "-server -Xmx" + str(int(round(0.8 * containerSize)))
                     + "m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC")
     putHiveProperty('hive.tez.container.size', containerSize)
 
   def recommendTezConfigurations(self, configurations, clusterData):
     putTezProperty = self.putProperty(configurations, "tez-site")
-    putTezProperty("tez.am.resource.memory.mb", clusterData['amMemory'])
+    putTezProperty("tez.am.resource.memory.mb", int(clusterData['amMemory']))
     putTezProperty("tez.am.java.opts",
                    "-server -Xmx" + str(int(0.8 * clusterData["amMemory"]))
                    + "m -Djava.net.preferIPv4Stack=true -XX:+UseNUMA -XX:+UseParallelGC")

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2.1/repos/repoinfo.xml

@@ -18,7 +18,7 @@
 <reposinfo>
   <os type="redhat6">
     <repo>
-      <baseurl>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos5/2.x/updates/2.2.0.0</baseurl>
+      <baseurl>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos6/2.x/updates/2.2.0.0</baseurl>
       <repoid>HDP-2.2.1</repoid>
       <reponame>HDP</reponame>
     </repo>
@@ -30,7 +30,7 @@
   </os>
   <os type="redhat5">
     <repo>
-      <baseurl>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos6/2.x/updates/2.2.0.0</baseurl>
+      <baseurl>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos5/2.x/updates/2.2.0.0</baseurl>
       <repoid>HDP-2.2.1</repoid>
       <reponame>HDP</reponame>
     </repo>

+ 29 - 15
ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunnerTest.java

@@ -20,6 +20,7 @@ package org.apache.ambari.server.api.services.stackadvisor;
 
 import static org.easymock.EasyMock.expect;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 import static org.powermock.api.easymock.PowerMock.createNiceMock;
 import static org.powermock.api.easymock.PowerMock.replay;
 import static org.powermock.api.support.membermodification.MemberModifier.stub;
@@ -56,8 +57,8 @@ public class StackAdvisorRunnerTest {
     temp.delete();
   }
 
-  @Test
-  public void testRunScript_processStartThrowsException_returnFalse() throws IOException {
+  @Test(expected = StackAdvisorException.class)
+  public void testRunScript_processStartThrowsException_returnFalse() throws Exception {
     String script = "echo";
     StackAdvisorCommandType saCommandType = StackAdvisorCommandType.RECOMMEND_COMPONENT_LAYOUT;
     File actionDirectory = temp.newFolder("actionDir");
@@ -68,14 +69,11 @@ public class StackAdvisorRunnerTest {
         .toReturn(processBuilder);
     expect(processBuilder.start()).andThrow(new IOException());
     replay(processBuilder);
-    boolean result = saRunner.runScript(script, saCommandType, actionDirectory);
-
-    assertEquals(false, result);
+    saRunner.runScript(script, saCommandType, actionDirectory);
   }
 
-  @Test
-  public void testRunScript_processExitCodeNonZero_returnFalse() throws IOException,
-      InterruptedException {
+  @Test(expected = StackAdvisorRequestException.class)
+  public void testRunScript_processExitCode1_returnFalse() throws Exception {
     String script = "echo";
     StackAdvisorCommandType saCommandType = StackAdvisorCommandType.RECOMMEND_COMPONENT_LAYOUT;
     File actionDirectory = temp.newFolder("actionDir");
@@ -88,14 +86,28 @@ public class StackAdvisorRunnerTest {
     expect(processBuilder.start()).andReturn(process);
     expect(process.waitFor()).andReturn(1);
     replay(processBuilder, process);
-    boolean result = saRunner.runScript(script, saCommandType, actionDirectory);
+    saRunner.runScript(script, saCommandType, actionDirectory);
+  }
+
+  @Test(expected = StackAdvisorException.class)
+  public void testRunScript_processExitCode2_returnFalse() throws Exception {
+    String script = "echo";
+    StackAdvisorCommandType saCommandType = StackAdvisorCommandType.RECOMMEND_COMPONENT_LAYOUT;
+    File actionDirectory = temp.newFolder("actionDir");
+    ProcessBuilder processBuilder = createNiceMock(ProcessBuilder.class);
+    Process process = createNiceMock(Process.class);
+    StackAdvisorRunner saRunner = new StackAdvisorRunner();
 
-    assertEquals(false, result);
+    stub(PowerMock.method(StackAdvisorRunner.class, "prepareShellCommand"))
+        .toReturn(processBuilder);
+    expect(processBuilder.start()).andReturn(process);
+    expect(process.waitFor()).andReturn(2);
+    replay(processBuilder, process);
+    saRunner.runScript(script, saCommandType, actionDirectory);
   }
 
   @Test
-  public void testRunScript_processExitCodeZero_returnTrue() throws IOException,
-      InterruptedException {
+  public void testRunScript_processExitCodeZero_returnTrue() throws Exception {
     String script = "echo";
     StackAdvisorCommandType saCommandType = StackAdvisorCommandType.RECOMMEND_COMPONENT_LAYOUT;
     File actionDirectory = temp.newFolder("actionDir");
@@ -108,9 +120,11 @@ public class StackAdvisorRunnerTest {
     expect(processBuilder.start()).andReturn(process);
     expect(process.waitFor()).andReturn(0);
     replay(processBuilder, process);
-    boolean result = saRunner.runScript(script, saCommandType, actionDirectory);
-
-    assertEquals(true, result);
+    try {
+      saRunner.runScript(script, saCommandType, actionDirectory);
+    } catch (StackAdvisorException ex) {
+      fail("Should not fail with StackAdvisorException");
+    }
   }
 
 }

+ 15 - 14
ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java

@@ -23,6 +23,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
@@ -41,6 +42,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequest.StackAdvisorRequestBuilder;
+import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRequestException;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorResponse;
 import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorRunner;
 import org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand.StackAdvisorData;
@@ -113,8 +115,8 @@ public class StackAdvisorCommandTest {
     doReturn(servicesJSON).when(command).getServicesInformation(request);
     doReturn(data).when(command)
         .adjust(any(StackAdvisorData.class), any(StackAdvisorRequest.class));
-    when(saRunner.runScript(any(String.class), any(StackAdvisorCommandType.class), any(File.class)))
-        .thenReturn(false);
+    doThrow(new StackAdvisorRequestException("error")).when(saRunner)
+        .runScript(any(String.class), any(StackAdvisorCommandType.class), any(File.class));
     command.invoke(request);
 
     assertTrue(false);
@@ -138,8 +140,8 @@ public class StackAdvisorCommandTest {
     doReturn("{\"services\" : \"HDFS\"").when(command).getServicesInformation(request);
     doThrow(new WebApplicationException()).when(command).adjust(any(StackAdvisorData.class),
         any(StackAdvisorRequest.class));
-    when(saRunner.runScript(any(String.class), any(StackAdvisorCommandType.class), any(File.class)))
-        .thenReturn(false);
+    doThrow(new StackAdvisorException("error")).when(saRunner)
+        .runScript(any(String.class), any(StackAdvisorCommandType.class), any(File.class));
     command.invoke(request);
 
     assertTrue(false);
@@ -168,16 +170,15 @@ public class StackAdvisorCommandTest {
     doReturn(servicesJSON).when(command).getServicesInformation(request);
     doReturn(data).when(command)
         .adjust(any(StackAdvisorData.class), any(StackAdvisorRequest.class));
-    when(saRunner.runScript(any(String.class), any(StackAdvisorCommandType.class), any(File.class)))
-        .thenAnswer(new Answer<Boolean>() {
-          public Boolean answer(InvocationOnMock invocation) throws Throwable {
-            String resultFilePath = String.format("%s/%s", requestId, command.getResultFileName());
-            File resultFile = new File(recommendationsDir, resultFilePath);
-            resultFile.getParentFile().mkdirs();
-            FileUtils.writeStringToFile(resultFile, testResourceString);
-            return true;
-          }
-        });
+    doAnswer(new Answer() {
+      public Object answer(InvocationOnMock invocation) throws Throwable {
+        String resultFilePath = String.format("%s/%s", requestId, command.getResultFileName());
+        File resultFile = new File(recommendationsDir, resultFilePath);
+        resultFile.getParentFile().mkdirs();
+        FileUtils.writeStringToFile(resultFile, testResourceString);
+        return null;
+      }
+    }).when(saRunner).runScript(any(String.class), any(StackAdvisorCommandType.class), any(File.class));
     TestResource result = command.invoke(request);
 
     assertEquals(expected, result.getType());

+ 123 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProviderTest.java

@@ -18,8 +18,10 @@
 
 package org.apache.ambari.server.controller.internal;
 
+import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.orm.entities.ViewEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceDataEntity;
@@ -27,6 +29,7 @@ import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 import org.apache.ambari.server.orm.entities.ViewInstancePropertyEntity;
 import org.apache.ambari.server.orm.entities.ViewParameterEntity;
 import org.apache.ambari.server.view.ViewRegistry;
+import org.apache.ambari.view.ViewDefinition;
 import org.easymock.Capture;
 import org.junit.Assert;
 import org.junit.Before;
@@ -111,8 +114,15 @@ public class ViewInstanceResourceProviderTest {
     viewInstanceEntity.setViewName("V1{1.0.0}");
     viewInstanceEntity.setName("I1");
 
+    ViewEntity viewEntity = new ViewEntity();
+    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADED);
+    viewEntity.setName("V1{1.0.0}");
+
+    viewInstanceEntity.setViewEntity(viewEntity);
+
     expect(singleton.instanceExists(viewInstanceEntity)).andReturn(false);
     expect(singleton.getInstanceDefinition("V1", "1.0.0", "I1")).andReturn(viewInstanceEntity);
+    expect(singleton.getDefinition("V1", null)).andReturn(viewEntity);
 
     Capture<ViewInstanceEntity> instanceEntityCapture = new Capture<ViewInstanceEntity>();
     singleton.installViewInstance(capture(instanceEntityCapture));
@@ -144,8 +154,15 @@ public class ViewInstanceResourceProviderTest {
     viewInstanceEntity.setViewName("V1{1.0.0}");
     viewInstanceEntity.setName("I1");
 
+    ViewEntity viewEntity = new ViewEntity();
+    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADED);
+    viewEntity.setName("V1{1.0.0}");
+
+    viewInstanceEntity.setViewEntity(viewEntity);
+
     expect(singleton.instanceExists(viewInstanceEntity)).andReturn(true);
     expect(singleton.getInstanceDefinition("V1", "1.0.0", "I1")).andReturn(viewInstanceEntity);
+    expect(singleton.getDefinition("V1", null)).andReturn(viewEntity);
 
     replay(singleton);
 
@@ -158,4 +175,110 @@ public class ViewInstanceResourceProviderTest {
 
     verify(singleton);
   }
+
+  @Test
+  public void testCreateResources_viewNotLoaded() throws Exception {
+    ViewInstanceResourceProvider provider = new ViewInstanceResourceProvider();
+
+    Set<Map<String, Object>> properties = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> propertyMap = new HashMap<String, Object>();
+
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID, "V1");
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_VERSION_PROPERTY_ID, "1.0.0");
+    propertyMap.put(ViewInstanceResourceProvider.INSTANCE_NAME_PROPERTY_ID, "I1");
+
+    properties.add(propertyMap);
+
+    ViewEntity viewEntity = new ViewEntity();
+    viewEntity.setName("V1{1.0.0}");
+    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADING);
+    ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
+    viewInstanceEntity.setViewName("V1{1.0.0}");
+    viewInstanceEntity.setName("I1");
+    viewInstanceEntity.setViewEntity(viewEntity);
+
+    expect(singleton.getInstanceDefinition("V1", "1.0.0", "I1")).andReturn(viewInstanceEntity);
+    expect(singleton.getDefinition("V1", null)).andReturn(viewEntity);
+
+    replay(singleton);
+
+    try {
+      provider.createResources(PropertyHelper.getCreateRequest(properties, null));
+      fail("Expected IllegalStateException.");
+    } catch (IllegalStateException e) {
+      // expected
+    }
+
+    verify(singleton);
+  }
+
+  @Test
+  public void testUpdateResources_viewNotLoaded() throws Exception {
+    ViewInstanceResourceProvider provider = new ViewInstanceResourceProvider();
+
+    Set<Map<String, Object>> properties = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> propertyMap = new HashMap<String, Object>();
+
+    propertyMap.put(ViewInstanceResourceProvider.ICON_PATH_ID, "path");
+
+    properties.add(propertyMap);
+
+    PredicateBuilder predicateBuilder = new PredicateBuilder();
+    Predicate predicate =
+        predicateBuilder.property(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID).equals("V1").toPredicate();
+    ViewEntity viewEntity = new ViewEntity();
+    viewEntity.setName("V1{1.0.0}");
+    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADING);
+    ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
+    viewInstanceEntity.setViewName("V1{1.0.0}");
+    viewInstanceEntity.setName("I1");
+    viewInstanceEntity.setViewEntity(viewEntity);
+
+    expect(singleton.getDefinitions()).andReturn(Collections.singleton(viewEntity));
+
+    replay(singleton);
+
+    provider.updateResources(PropertyHelper.getCreateRequest(properties, null), predicate);
+
+    Assert.assertNull(viewInstanceEntity.getIcon());
+
+    verify(singleton);
+  }
+
+  @Test
+  public void testDeleteResources_viewNotLoaded() throws Exception {
+    ViewInstanceResourceProvider provider = new ViewInstanceResourceProvider();
+
+    Set<Map<String, Object>> properties = new HashSet<Map<String, Object>>();
+
+    Map<String, Object> propertyMap = new HashMap<String, Object>();
+
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID, "V1");
+    propertyMap.put(ViewInstanceResourceProvider.VIEW_VERSION_PROPERTY_ID, "1.0.0");
+    propertyMap.put(ViewInstanceResourceProvider.INSTANCE_NAME_PROPERTY_ID, "I1");
+
+    properties.add(propertyMap);
+
+    PredicateBuilder predicateBuilder = new PredicateBuilder();
+    Predicate predicate =
+        predicateBuilder.property(ViewInstanceResourceProvider.VIEW_NAME_PROPERTY_ID).equals("V1").toPredicate();
+
+    ViewEntity viewEntity = new ViewEntity();
+    viewEntity.setName("V1{1.0.0}");
+    viewEntity.setStatus(ViewDefinition.ViewStatus.LOADING);
+    ViewInstanceEntity viewInstanceEntity = new ViewInstanceEntity();
+    viewInstanceEntity.setViewName("V1{1.0.0}");
+    viewInstanceEntity.setName("I1");
+    viewInstanceEntity.setViewEntity(viewEntity);
+
+    expect(singleton.getDefinitions()).andReturn(Collections.singleton(viewEntity));
+
+    replay(singleton);
+
+    provider.deleteResources(predicate);
+
+    verify(singleton);
+  }
 }

+ 133 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPermissionResourceProviderTest.java

@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.dao.PermissionDAO;
+import org.apache.ambari.server.orm.entities.PermissionEntity;
+import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
+import org.apache.ambari.server.orm.entities.ViewEntity;
+import org.apache.ambari.server.view.ViewRegistry;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+
+/**
+ * ViewPermissionResourceProvider tests.
+ */
+public class ViewPermissionResourceProviderTest {
+  private final static PermissionDAO dao = createStrictMock(PermissionDAO.class);
+  private static final ViewRegistry viewRegistry = createMock(ViewRegistry.class);
+
+  static {
+    ViewRegistry.initInstance(viewRegistry);
+  }
+
+  @BeforeClass
+  public static void initClass() {
+    ViewPermissionResourceProvider.init(dao);
+  }
+
+  @Before
+  public void resetGlobalMocks() {
+    reset(dao, viewRegistry);
+  }
+
+  @Test
+  public void testGetResources() throws Exception {
+    List<PermissionEntity> permissionEntities = new LinkedList<PermissionEntity>();
+
+    PermissionEntity permissionEntity = createNiceMock(PermissionEntity.class);
+    PermissionEntity viewUsePermissionEntity = createNiceMock(PermissionEntity.class);
+    ResourceTypeEntity resourceTypeEntity = createNiceMock(ResourceTypeEntity.class);
+    ViewEntity viewEntity = createMock(ViewEntity.class);
+
+    permissionEntities.add(permissionEntity);
+
+    expect(dao.findViewUsePermission()).andReturn(viewUsePermissionEntity);
+    expect(dao.findAll()).andReturn(Collections.singletonList(permissionEntity));
+
+    expect(permissionEntity.getId()).andReturn(99);
+    expect(permissionEntity.getPermissionName()).andReturn("P1");
+    expect(permissionEntity.getResourceType()).andReturn(resourceTypeEntity);
+    expect(resourceTypeEntity.getName()).andReturn("V1");
+
+    expect(viewEntity.isLoaded()).andReturn(true).anyTimes();
+    expect(viewEntity.getCommonName()).andReturn("V1").anyTimes();
+    expect(viewEntity.getVersion()).andReturn("1.0.0").anyTimes();
+
+    expect(viewRegistry.getDefinition(resourceTypeEntity)).andReturn(viewEntity);
+
+    replay(dao, permissionEntity, viewUsePermissionEntity, resourceTypeEntity, viewEntity, viewRegistry);
+    ViewPermissionResourceProvider provider = new ViewPermissionResourceProvider();
+    Set<Resource> resources = provider.getResources(PropertyHelper.getReadRequest(), null);
+    // built in permissions
+    Assert.assertEquals(1, resources.size());
+    Resource resource = resources.iterator().next();
+
+    Assert.assertEquals(99, resource.getPropertyValue(PermissionResourceProvider.PERMISSION_ID_PROPERTY_ID));
+    Assert.assertEquals("P1", resource.getPropertyValue(PermissionResourceProvider.PERMISSION_NAME_PROPERTY_ID));
+    Assert.assertEquals("V1", resource.getPropertyValue(PermissionResourceProvider.RESOURCE_NAME_PROPERTY_ID));
+    verify(dao, permissionEntity, viewUsePermissionEntity, resourceTypeEntity, viewEntity, viewRegistry);
+  }
+
+  @Test
+  public void testGetResources_viewNotLoaded() throws Exception {
+    List<PermissionEntity> permissionEntities = new LinkedList<PermissionEntity>();
+
+    PermissionEntity permissionEntity = createNiceMock(PermissionEntity.class);
+    PermissionEntity viewUsePermissionEntity = createNiceMock(PermissionEntity.class);
+    ResourceTypeEntity resourceTypeEntity = createNiceMock(ResourceTypeEntity.class);
+    ViewEntity viewEntity = createMock(ViewEntity.class);
+
+    permissionEntities.add(permissionEntity);
+
+    expect(dao.findViewUsePermission()).andReturn(viewUsePermissionEntity);
+    expect(dao.findAll()).andReturn(Collections.singletonList(permissionEntity));
+
+    expect(permissionEntity.getResourceType()).andReturn(resourceTypeEntity);
+
+    expect(viewEntity.isLoaded()).andReturn(false).anyTimes();
+
+    expect(viewRegistry.getDefinition(resourceTypeEntity)).andReturn(viewEntity);
+
+    replay(dao, permissionEntity, viewUsePermissionEntity, resourceTypeEntity, viewEntity, viewRegistry);
+    ViewPermissionResourceProvider provider = new ViewPermissionResourceProvider();
+    Set<Resource> resources = provider.getResources(PropertyHelper.getReadRequest(), null);
+    // built in permissions
+    Assert.assertEquals(0, resources.size());
+
+    verify(dao, permissionEntity, viewUsePermissionEntity, resourceTypeEntity, viewEntity, viewRegistry);
+  }
+}

+ 3 - 5
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ViewPrivilegeResourceProviderTest.java

@@ -45,6 +45,7 @@ import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.view.ViewInstanceHandlerList;
 import org.apache.ambari.server.view.ViewRegistry;
 import org.apache.ambari.server.view.ViewRegistryTest;
+import org.apache.ambari.view.ViewDefinition;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -100,6 +101,8 @@ public class ViewPrivilegeResourceProviderTest {
     ViewInstanceEntity viewInstanceDefinition = ViewInstanceEntityTest.getViewInstanceEntity();
 
     viewDefinition.addInstanceDefinition(viewInstanceDefinition);
+    viewInstanceDefinition.setViewEntity(viewDefinition);
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADED);
 
     ViewRegistry registry = ViewRegistry.getInstance();
 
@@ -159,10 +162,5 @@ public class ViewPrivilegeResourceProviderTest {
     verify(privilegeDAO, userDAO, groupDAO, principalDAO, permissionDAO, resourceDAO, privilegeEntity, resourceEntity,
         userEntity, principalEntity, permissionEntity, principalTypeEntity);
   }
-
-  @Test
-  public void testUpdateResources() throws Exception {
-    // see AmbariPrivilegeResourceProvider#testUpdateResources
-  }
 }
 

+ 17 - 0
ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewEntityTest.java

@@ -266,4 +266,21 @@ public class ViewEntityTest {
     viewDefinition.setStatusDetail("status detail");
     Assert.assertEquals("status detail", viewDefinition.getStatusDetail());
   }
+
+  @Test
+  public void testIsLoaded() throws Exception {
+    ViewEntity viewDefinition = getViewEntity();
+
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.PENDING);
+    Assert.assertFalse(viewDefinition.isLoaded());
+
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADING);
+    Assert.assertFalse(viewDefinition.isLoaded());
+
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.LOADED);
+    Assert.assertTrue(viewDefinition.isLoaded());
+
+    viewDefinition.setStatus(ViewDefinition.ViewStatus.ERROR);
+    Assert.assertFalse(viewDefinition.isLoaded());
+  }
 }

+ 16 - 0
ambari-server/src/test/java/org/apache/ambari/server/orm/entities/ViewInstanceEntityTest.java

@@ -20,6 +20,7 @@ package org.apache.ambari.server.orm.entities;
 
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.orm.entities.ViewInstanceEntity.ViewInstanceVersionDTO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.view.ViewRegistryTest;
 import org.apache.ambari.server.view.configuration.InstanceConfig;
@@ -298,6 +299,21 @@ public class ViewInstanceEntityTest {
         viewInstanceDefinition.getContextPath());
   }
 
+  @Test
+  public void testParseContextPath() throws Exception {
+    final String[] pathesToTest = {
+        ViewInstanceEntity.VIEWS_CONTEXT_PATH_PREFIX + "MY_VIEW/1.0.0/INSTANCE1",
+        ViewInstanceEntity.VIEWS_CONTEXT_PATH_PREFIX + "MY_VIEW/1.0.0/INSTANCE1/index.html",
+        ViewInstanceEntity.VIEWS_CONTEXT_PATH_PREFIX + "MY_VIEW/1.0.0/INSTANCE1/api/test"
+    };
+    for (String contextPath: pathesToTest) {
+      final ViewInstanceVersionDTO dto = ViewInstanceEntity.parseContextPath(contextPath);
+      Assert.assertEquals("INSTANCE1", dto.getInstanceName());
+      Assert.assertEquals("MY_VIEW", dto.getViewName());
+      Assert.assertEquals("1.0.0", dto.getVersion());
+    }
+  }
+
   @Test
   public void testInstanceData() throws Exception {
     TestSecurityHelper securityHelper = new TestSecurityHelper("user1");

+ 9 - 68
ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulatorTest.java → ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java

@@ -15,17 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.ambari.server.security.authorization;
+package org.apache.ambari.server.security.ldap;
 
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 import junit.framework.Assert;
 
 import org.apache.ambari.server.AmbariException;
@@ -35,10 +31,15 @@ import org.apache.ambari.server.orm.entities.MemberEntity;
 import org.apache.ambari.server.orm.entities.PrincipalEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
+import org.apache.ambari.server.security.authorization.LdapServerProperties;
+import org.apache.ambari.server.security.authorization.User;
+import org.apache.ambari.server.security.authorization.Users;
+import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.IAnswer;
 import org.junit.Test;
+import org.springframework.ldap.core.AttributesMapper;
 import org.springframework.ldap.core.LdapTemplate;
 
 public class AmbariLdapDataPopulatorTest {
@@ -62,66 +63,6 @@ public class AmbariLdapDataPopulatorTest {
     }
   }
 
-  @Test
-  public void testRefreshGroupMembers() throws AmbariException {
-    final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
-    final Users users = EasyMock.createNiceMock(Users.class);
-
-    final GroupEntity ldapGroup = new GroupEntity();
-    ldapGroup.setGroupId(1);
-    ldapGroup.setGroupName("ldapGroup");
-    ldapGroup.setLdapGroup(true);
-    ldapGroup.setMemberEntities(new HashSet<MemberEntity>());
-
-    final User ldapUserWithoutGroup = createLdapUserWithoutGroup();
-    final User ldapUserWithGroup = createLdapUserWithGroup(ldapGroup);
-    final User localUserWithoutGroup = createLocalUserWithoutGroup();
-    final User localUserWithGroup = createLocalUserWithGroup(ldapGroup);
-
-    final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users) {
-      @Override
-      protected Set<String> getExternalLdapGroupMembers(String groupName) {
-        return new HashSet<String>() {
-          {
-            add(ldapUserWithGroup.getUserName());
-            add(ldapUserWithoutGroup.getUserName());
-          }
-        };
-      }
-
-      @Override
-      protected Map<String, User> getInternalUsers() {
-        return new HashMap<String, User>() {
-          {
-            put(localUserWithGroup.getUserName(), localUserWithGroup);
-            put(localUserWithoutGroup.getUserName(), localUserWithoutGroup);
-          }
-        };
-      }
-
-      @Override
-      protected Map<String, User> getInternalMembers(String groupName) {
-        return new HashMap<String, User>() {
-          {
-            put(localUserWithGroup.getUserName(), localUserWithGroup);
-          }
-        };
-      }
-    };
-
-    users.createUser(EasyMock.<String> anyObject(), EasyMock.<String> anyObject());
-    EasyMock.expectLastCall().times(2);
-
-    users.addMemberToGroup(EasyMock.<String> anyObject(), EasyMock.<String> anyObject());
-    EasyMock.expectLastCall().times(2);
-
-    EasyMock.replay(users);
-
-    populator.refreshGroupMembers(ldapGroup.getGroupName());
-
-    EasyMock.verify(users);
-  }
-
   @Test
   public void testIsLdapEnabled_badConfiguration() {
     final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
@@ -130,7 +71,7 @@ public class AmbariLdapDataPopulatorTest {
     final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
 
     EasyMock.expect(configuration.isLdapConfigured()).andReturn(true);
-    EasyMock.expect(populator.loadLdapTemplate().list(EasyMock. <String>anyObject())).andThrow(new NullPointerException()).once();
+    EasyMock.expect(populator.loadLdapTemplate().search(EasyMock. <String>anyObject(), EasyMock. <String>anyObject(), EasyMock. <AttributesMapper>anyObject())).andThrow(new NullPointerException()).once();
     EasyMock.replay(populator.loadLdapTemplate(), configuration);
 
     Assert.assertFalse(populator.isLdapEnabled());
@@ -145,8 +86,8 @@ public class AmbariLdapDataPopulatorTest {
     final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
 
     EasyMock.expect(configuration.isLdapConfigured()).andReturn(true);
-    EasyMock.expect(populator.loadLdapTemplate().list(EasyMock. <String>anyObject())).andReturn(Collections.emptyList()).once();
-    EasyMock.replay(populator.loadLdapTemplate(),configuration);
+    EasyMock.expect(populator.loadLdapTemplate().search(EasyMock. <String>anyObject(), EasyMock. <String>anyObject(), EasyMock. <AttributesMapper>anyObject())).andReturn(Collections.emptyList()).once();
+    EasyMock.replay(populator.loadLdapTemplate(), configuration);
 
     Assert.assertTrue(populator.isLdapEnabled());
     EasyMock.verify(populator.loadLdapTemplate(), configuration);

+ 94 - 0
ambari-server/src/test/java/org/apache/ambari/server/security/ldap/LdapPerformanceTest.java

@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.ldap;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.security.ClientSecurityType;
+import org.apache.ambari.server.security.authorization.AuthorizationTestModule;
+import org.apache.ambari.server.security.authorization.Users;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+
+/**
+ * Performs sync request to real LDAP server.
+ */
+@Ignore
+public class LdapPerformanceTest {
+
+  private static Injector injector;
+
+  @Inject
+  private AmbariLdapDataPopulator populator;
+
+  @Inject
+  private Users users;
+
+  @Inject
+  Configuration configuration;
+
+  final String SPRING_CONTEXT_LOCATION = "classpath:webapp/WEB-INF/spring-security.xml";
+
+  @Before
+  public void setUp() {
+    injector = Guice.createInjector(new AuthorizationTestModule());
+
+    injector.injectMembers(this);
+    injector.getInstance(GuiceJpaInitializer.class);
+    configuration.setClientSecurityType(ClientSecurityType.LDAP);
+    configuration.setLdap("c6402.ambari.apache.org:389", "posixAccount", "uid",
+        "posixGroup", "cn", "memberUid", "dc=apache,dc=org", false,
+        "uid=hdfs,ou=people,ou=dev,dc=apache,dc=org", "hdfs");
+  }
+
+  @Test
+  public void testLdapSync() throws AmbariException, InterruptedException {
+    long time = System.currentTimeMillis();
+    Set<LdapGroupDto> groups = populator.getExternalLdapGroupInfo();
+    Set<LdapUserDto> users = populator.getExternalLdapUserInfo();
+    Set<String> userNames = new HashSet<String>();
+    for (LdapUserDto user : users) {
+      userNames.add(user.getUserName());
+    }
+    Set<String> groupNames = new HashSet<String>();
+    for (LdapGroupDto group : groups) {
+      groupNames.add(group.getGroupName());
+    }
+    System.out.println("Data fetch: " + (System.currentTimeMillis() - time));
+    time = System.currentTimeMillis();
+    LdapBatchDto batchDto = populator.synchronizeLdapUsersAndGroups(userNames, groupNames);
+    batchDto = populator.synchronizeLdapUsersAndGroups(userNames, groupNames);
+    this.users.processLdapSync(batchDto);
+    System.out.println("Initial sync: " + (System.currentTimeMillis() - time));
+    time = System.currentTimeMillis();
+    batchDto = populator.synchronizeLdapUsersAndGroups(userNames, groupNames);
+    this.users.processLdapSync(batchDto);
+    System.out.println("Subsequent sync: " + (System.currentTimeMillis() - time));
+    time = System.currentTimeMillis();
+  }
+}

+ 39 - 0
ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java

@@ -33,10 +33,12 @@ import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Enumeration;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -702,6 +704,43 @@ public class ViewRegistryTest {
     verify(viewDAO, viewInstanceDAO, securityHelper);
   }
 
+  @Test
+  public void testUninstallViewInstance() throws Exception {
+
+    ViewRegistry registry = getRegistry();
+
+    Configuration ambariConfig = new Configuration(new Properties());
+
+    ViewConfig config = ViewConfigTest.getConfig(xml_valid_instance);
+    ViewEntity viewEntity = getViewEntity(config, ambariConfig, getClass().getClassLoader(), "");
+    ViewInstanceEntity viewInstanceEntity = getViewInstanceEntity(viewEntity, config.getInstances().get(0));
+    ResourceEntity resource = new ResourceEntity();
+    resource.setId(3L);
+    viewInstanceEntity.setResource(resource);
+    PrivilegeEntity privilege1 = createNiceMock(PrivilegeEntity.class);
+    PrivilegeEntity privilege2 = createNiceMock(PrivilegeEntity.class);
+    List<PrivilegeEntity> privileges = Arrays.asList(privilege1, privilege2);
+
+    expect(privilegeDAO.findByResourceId(3L)).andReturn(privileges);
+    privilegeDAO.remove(privilege1);
+    privilegeDAO.remove(privilege2);
+    viewInstanceDAO.remove(viewInstanceEntity);
+
+    handlerList.removeViewInstance(viewInstanceEntity);
+
+    replay(viewInstanceDAO, privilegeDAO, handlerList);
+
+    registry.addDefinition(viewEntity);
+    registry.addInstanceDefinition(viewEntity, viewInstanceEntity);
+    registry.uninstallViewInstance(viewInstanceEntity);
+
+    Collection<ViewInstanceEntity> viewInstanceDefinitions = registry.getInstanceDefinitions(viewEntity);
+
+    Assert.assertEquals(0, viewInstanceDefinitions.size());
+
+    verify(viewInstanceDAO, privilegeDAO, handlerList);
+  }
+
   @Test
   public void testUpdateViewInstance_invalid() throws Exception {
 

+ 59 - 0
ambari-server/src/test/python/stacks/1.3.2/HDFS/test_hdfs_client.py

@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+import tempfile
+import tarfile
+import contextlib
+from stacks.utils.RMFTestCase import *
+import os
+
+class Test(RMFTestCase):
+
+  @patch.object(tarfile,"open", new = MagicMock())
+  @patch.object(tempfile,"mkdtemp", new = MagicMock(return_value='/tmp/123'))
+  @patch.object(contextlib,"closing", new = MagicMock())
+  @patch("os.path.exists", new = MagicMock(return_value=True))
+  def test_generate_configs_default(self):
+    self.executeScript("1.3.2/services/HDFS/package/scripts/hdfs_client.py",
+                       classname = "HdfsClient",
+                       command = "generate_configs",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/tmp',
+                              recursive = True,
+                              )
+    self.printResources()
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/tmp/123/log4j.properties',
+                              content = InlineTemplate("log4jproperties\nline2log4jproperties\nline2\nambari.jobhistory.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nambari.jobhistory.driver=org.postgresql.Driver\nambari.jobhistory.user=mapred\nambari.jobhistory.password=mapred\nambari.jobhistory.logger=${hadoop.root.logger}\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nlog4j.appender.JHA.driver=org.postgresql.Driver\nlog4j.appender.JHA.user=mapred\nlog4j.appender.JHA.password=mapred\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n"),
+                              )
+    self.assertResourceCalled('Directory', '/tmp/123',
+                              action = ['delete'],
+                              )
+    self.assertNoMoreResources()

+ 35 - 1
ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py

@@ -18,6 +18,9 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
 from mock.mock import MagicMock, call, patch
+import tempfile
+import tarfile
+import contextlib
 from stacks.utils.RMFTestCase import *
 import os
 
@@ -165,4 +168,35 @@ class TestMapreduceClient(RMFTestCase):
                               owner = 'mapred',
                               group = 'hadoop',
                               )
-    self.assertNoMoreResources()
+    self.assertNoMoreResources()
+
+  @patch.object(tarfile,"open", new = MagicMock())
+  @patch.object(tempfile,"mkdtemp", new = MagicMock(return_value='/tmp/123'))
+  @patch.object(contextlib,"closing", new = MagicMock())
+  @patch("os.path.exists", new = MagicMock(return_value=True))
+  def test_generate_configs_default(self):
+    self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/client.py",
+                       classname = "Client",
+                       command = "generate_configs",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/tmp',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              conf_dir = '/tmp/123',
+                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/tmp/123/log4j.properties',
+                              content = InlineTemplate("log4jproperties\nline2log4jproperties\nline2\nambari.jobhistory.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nambari.jobhistory.driver=org.postgresql.Driver\nambari.jobhistory.user=mapred\nambari.jobhistory.password=mapred\nambari.jobhistory.logger=${hadoop.root.logger}\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nlog4j.appender.JHA.driver=org.postgresql.Driver\nlog4j.appender.JHA.user=mapred\nlog4j.appender.JHA.password=mapred\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n"),
+                              )
+    self.assertResourceCalled('Directory', '/tmp/123',
+                              action = ['delete'],
+                              )
+    self.assertNoMoreResources()

+ 7 - 3
ambari-server/src/test/python/stacks/1.3.2/configs/default.json

@@ -32,8 +32,11 @@
         "script": "scripts/datanode.py",
         "excluded_hosts": "host1,host2",
         "mark_draining_only" : "false",
-        "update_exclude_file_only" : "false"
-    }, 
+        "update_exclude_file_only" : "false",
+        "xml_configs_list":[{"core-site.xml":"core-site"},{"mapred-site.xml":"mapred-site"}],
+        "env_configs_list":[{"log4j.properties":"hdfs-log4j,mapreduce-log4j"}],
+        "output_file":"MAPREDUCE_CLIENT-configs.tar.gz"
+    },
     "taskId": 18, 
     "public_hostname": "c6402.ambari.apache.org", 
     "configurations": {
@@ -326,7 +329,8 @@
             "jtnode_opt_newsize": "200m", 
             "mapred_user": "mapred", 
             "hadoop_heapsize": "1024", 
-            "jtnode_opt_maxnewsize": "200m"
+            "jtnode_opt_maxnewsize": "200m",
+            "rca_properties": "\nambari.jobhistory.database={ambari_db_rca_url}\nambari.jobhistory.driver={ambari_db_rca_driver}\nambari.jobhistory.user={ambari_db_rca_username}\nambari.jobhistory.password={ambari_db_rca_password}\nambari.jobhistory.logger=${{hadoop.root.logger}}\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database={ambari_db_rca_url}\nlog4j.appender.JHA.driver={ambari_db_rca_driver}\nlog4j.appender.JHA.user={ambari_db_rca_username}\nlog4j.appender.JHA.password={ambari_db_rca_password}\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n"
         }, 
         "nagios-env": {
             "hive_metastore_user_passwd": "password", 

+ 160 - 126
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py

@@ -28,84 +28,101 @@ class TestHiveClient(RMFTestCase):
                        command = "configure",
                        config_file="default_client.json"
     )
-
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'}
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
+        owner = 'hive',
+        group = 'hadoop',
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertNoMoreResources()
 
@@ -117,83 +134,100 @@ class TestHiveClient(RMFTestCase):
                        command = "configure",
                        config_file="secured_client.json"
     )
-
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'}
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
+        owner = 'hive',
+        group = 'hadoop',
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertNoMoreResources()

+ 217 - 196
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py

@@ -38,29 +38,15 @@ class TestHiveMetastore(RMFTestCase):
     )
 
     self.assert_configure_default()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-                              not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
-                              user = 'hive'
+        not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+        user = 'hive',
     )
-
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
-                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
+        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+        tries = 5,
+        try_sleep = 10,
     )
-
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -82,20 +68,6 @@ class TestHiveMetastore(RMFTestCase):
                        config_file="secured.json"
     )
     self.assert_configure_default()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
     self.assertNoMoreResources()
 
   def test_start_secured(self):
@@ -106,29 +78,15 @@ class TestHiveMetastore(RMFTestCase):
     )
 
     self.assert_configure_secured()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
     self.assertResourceCalled('Execute', 'env HADOOP_HOME=/usr JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.log /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-                              not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
-                              user = 'hive'
+        not_if = 'ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive.pid` >/dev/null 2>&1',
+        user = 'hive',
     )
-
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/share/java/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
-                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries=5, try_sleep=10
+        path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+        tries = 5,
+        try_sleep = 10,
     )
-
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
@@ -144,188 +102,251 @@ class TestHiveMetastore(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
-      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-      path = ['/bin', '/usr/bin/'],
-      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['hive-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'}
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-                              owner = 'hive',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        path = ['/bin', '/usr/bin/'],
+        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-      content = StaticFile('startMetastore.sh'),
-      mode = 0755,
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
     )
     self.assertResourceCalled('Directory', '/var/run/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
-      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-      path = ['/bin', '/usr/bin/'],
-      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['hive-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'}
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-                              owner = 'hive',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        path = ['/bin', '/usr/bin/'],
+        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-      content = StaticFile('startMetastore.sh'),
-      mode = 0755,
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
     )
     self.assertResourceCalled('Directory', '/var/run/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
+    )

+ 255 - 249
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py

@@ -31,20 +31,6 @@ class TestHiveServer(RMFTestCase):
                        config_file="default.json"
     )
     self.assert_configure_default()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
     self.assertNoMoreResources()
 
   @patch("hive_service.check_fs_root")
@@ -59,21 +45,6 @@ class TestHiveServer(RMFTestCase):
     )
 
     self.assert_configure_default()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-
     self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
                               action = ['create_delayed'],
                               mode = 0755,
@@ -157,20 +128,6 @@ class TestHiveServer(RMFTestCase):
                        config_file="secured.json"
     )
     self.assert_configure_secured()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
     self.assertNoMoreResources()
 
   @patch("hive_service.check_fs_root")
@@ -185,20 +142,6 @@ class TestHiveServer(RMFTestCase):
     )
 
     self.assert_configure_secured()
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-exec-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hive/conf/hive-log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hive',
-                              content='log4jproperties\nline2'
-    )
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.log /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
                               not_if = 'ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps `cat /var/run/hive/hive-server.pid` >/dev/null 2>&1',
                               user = 'hive'
@@ -230,245 +173,308 @@ class TestHiveServer(RMFTestCase):
 
   def assert_configure_default(self):
     self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0777,
-                              owner = 'hive',
-                              action = ['create_delayed'],
-                              )
+        security_enabled = False,
+        keytab = UnknownConfigurationMock(),
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        mode = 0777,
+        owner = 'hive',
+        action = ['create_delayed'],
+    )
     self.assertResourceCalled('HdfsDirectory', '/user/hive',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0700,
-                              owner = 'hive',
-                              action = ['create_delayed'],
-                              )
+        security_enabled = False,
+        keytab = UnknownConfigurationMock(),
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        mode = 0700,
+        owner = 'hive',
+        action = ['create_delayed'],
+    )
     self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              action = ['create'],
-                              )
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
-      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-      path = ['/bin', '/usr/bin/'],
-      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+        security_enabled = False,
+        keytab = UnknownConfigurationMock(),
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        action = ['create'],
     )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['hive-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'}
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-                              owner = 'hive',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        path = ['/bin', '/usr/bin/'],
+        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
-      content = Template('startHiveserver2.sh.j2'),
-      mode = 0755,
+        content = Template('startHiveserver2.sh.j2'),
+        mode = 0755,
     )
     self.assertResourceCalled('Directory', '/var/run/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-
+    
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0777,
-                              owner = 'hive',
-                              action = ['create_delayed'],
-                              )
+        security_enabled = True,
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        mode = 0777,
+        owner = 'hive',
+        action = ['create_delayed'],
+    )
     self.assertResourceCalled('HdfsDirectory', '/user/hive',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0700,
-                              owner = 'hive',
-                              action = ['create_delayed'],
-                              )
+        security_enabled = True,
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        mode = 0700,
+        owner = 'hive',
+        action = ['create_delayed'],
+    )
     self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              action = ['create'],
-                              )
-
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
-      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-      path = ['/bin', '/usr/bin/'],
-      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+        security_enabled = True,
+        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
+        kinit_path_local = '/usr/bin/kinit',
+        action = ['create'],
     )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-                              owner = 'hive',
-                              group = 'hadoop',
-                              mode = 0644,
-                              conf_dir = '/etc/hive/conf',
-                              configurations = self.getConfig()['configurations']['hive-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'}
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-                              owner = 'hive',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-exec-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-log4j.properties',
+        content = 'log4jproperties\nline2',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0644,
+    )
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        path = ['/bin', '/usr/bin/'],
+        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
-      content = Template('startHiveserver2.sh.j2'),
-      mode = 0755,
+        content = Template('startHiveserver2.sh.j2'),
+        mode = 0755,
     )
     self.assertResourceCalled('Directory', '/var/run/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
 
   @patch("hive_service.check_fs_root")

+ 156 - 1
ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py

@@ -174,7 +174,7 @@ class TestHDP206StackAdvisor(TestCase):
     result = self.stackAdvisor.validateConfigurations(services, hosts)
 
     expectedItems = [
-      {"message": "Value is less than the recommended default of 2046", "level": "WARN"},
+      {"message": "Value is less than the recommended default of 2048", "level": "WARN"},
       {"message": "Value should be integer", "level": "ERROR"},
       {"message": "Value should be set", "level": "ERROR"}
     ]
@@ -235,6 +235,161 @@ class TestHDP206StackAdvisor(TestCase):
     ]
     self.assertValidationResult(expectedItems, result)
 
+  def test_getClusterData_withHBaseAnd6gbRam(self):
+    servicesList = ["HBASE"]
+    components = []
+    hosts = {
+      "items" : [
+        {
+          "Hosts" : {
+            "cpu_count" : 8,
+            "total_mem" : 6291456,
+            "disk_info" : [
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/vagrant"},
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/vagrant"}
+            ]
+          }
+        }
+      ]
+    }
+    expected = {
+      "hBaseInstalled": True,
+      "components": components,
+      "cpu": 8,
+      "disk": 8,
+      "ram": 6,
+      "reservedRam": 2,
+      "hbaseRam": 1,
+      "minContainerSize": 512,
+      "totalAvailableRam": 3072,
+      "containers": 6,
+      "ramPerContainer": 512,
+      "mapMemory": 512,
+      "reduceMemory": 512,
+      "amMemory": 512
+    }
+
+    result = self.stackAdvisor.getClusterData(servicesList, hosts, components)
+
+    self.assertEquals(result, expected)
+
+  def test_getClusterData_withHBaseAnd48gbRam(self):
+    servicesList = ["HBASE"]
+    components = []
+    hosts = {
+      "items" : [
+        {
+          "Hosts" : {
+            "cpu_count" : 6,
+            "total_mem" : 50331648,
+            "disk_info" : [
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/vagrant"},
+              {"mountpoint" : "/"},
+              {"mountpoint" : "/dev/shm"},
+              {"mountpoint" : "/vagrant"}
+            ]
+          }
+        }
+      ]
+    }
+    expected = {
+      "hBaseInstalled": True,
+      "components": components,
+      "cpu": 6,
+      "disk": 6,
+      "ram": 48,
+      "reservedRam": 6,
+      "hbaseRam": 8,
+      "minContainerSize": 2048,
+      "totalAvailableRam": 34816,
+      "containers": 11,
+      "ramPerContainer": 3072,
+      "mapMemory": 3072,
+      "reduceMemory": 3072,
+      "amMemory": 3072
+    }
+
+    result = self.stackAdvisor.getClusterData(servicesList, hosts, components)
+
+    self.assertEquals(result, expected)
+
+  def test_recommendYARNConfigurations(self):
+    configurations = {}
+    clusterData = {
+      "containers" : 5,
+      "ramPerContainer": 256
+    }
+    expected = {
+      "yarn-site": {
+        "properties": {
+          "yarn.nodemanager.resource.memory-mb": "1280",
+          "yarn.scheduler.minimum-allocation-mb": "256",
+          "yarn.scheduler.maximum-allocation-mb": "1280"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendMapReduce2Configurations_mapMemoryLessThan2560(self):
+    configurations = {}
+    clusterData = {
+      "mapMemory": 567,
+      "reduceMemory": 345.6666666666666,
+      "amMemory": 123.54
+    }
+    expected = {
+      "mapred-site": {
+        "properties": {
+          "yarn.app.mapreduce.am.resource.mb": "123",
+          "yarn.app.mapreduce.am.command-opts": "-Xmx99m",
+          "mapreduce.map.memory.mb": "567",
+          "mapreduce.reduce.memory.mb": "345",
+          "mapreduce.map.java.opts": "-Xmx454m",
+          "mapreduce.reduce.java.opts": "-Xmx277m",
+          "mapreduce.task.io.sort.mb": "227"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendMapReduce2Configurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)
+
+  def test_getClusterData_noHostsWithoutHBase(self):
+    servicesList = []
+    components = []
+    hosts = {
+      "items" : []
+    }
+    result = self.stackAdvisor.getClusterData(servicesList, hosts, components)
+
+    expected = {
+      "hBaseInstalled": False,
+      "components": components,
+      "cpu": 0,
+      "disk": 0,
+      "ram": 0,
+      "reservedRam": 1,
+      "hbaseRam": 1,
+      "minContainerSize": 256,
+      "totalAvailableRam": 2048,
+      "containers": 3,
+      "ramPerContainer": 682.6666666666666,
+      "mapMemory": 682,
+      "reduceMemory": 682.6666666666666,
+      "amMemory": 682.6666666666666
+    }
+
+    self.assertEquals(result, expected)
 
   def prepareHosts(self, hostsNames):
     hosts = { "items": [] }

+ 159 - 145
ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py

@@ -104,195 +104,209 @@ class TestHiveMetastore(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
-      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-      path = ['/bin', '/usr/bin/'],
-      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner='hive',
-      group='hadoop',
-      mode=0644,
-      conf_dir='/etc/hive/conf',
-      configurations=self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes=self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        path = ['/bin', '/usr/bin/'],
+        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-      content = StaticFile('startMetastore.sh'),
-      mode = 0755,
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
     )
     self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -initSchema -dbType mysql -userName hive -passWord aaa',
         not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord aaa',
     )
     self.assertResourceCalled('Directory', '/var/run/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
 
   def assert_configure_secured(self):
-    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
-      creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
-      path = ['/bin', '/usr/bin/'],
-      not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/etc/hive/conf.server',
-      owner = 'hive',
-      group = 'hadoop',
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0644,
-      conf_dir = '/etc/hive/conf',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf.server',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        owner = 'hive',
+        group = 'hadoop',
+        recursive = True,
     )
     self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['mapred-site'],
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0600,
-      conf_dir = '/etc/hive/conf.server',
-      configurations = self.getConfig()['configurations']['hive-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
-    )
-    self.assertResourceCalled('Execute', "/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf -x \"\" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar'",
-      not_if = '[ -f DBConnectionVerification.jar]',
-      environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        group = 'hadoop',
+        conf_dir = '/etc/hive/conf',
+        mode = 0644,
+        configuration_attributes = self.getConfig()['configuration_attributes']['hive-site'],
+        owner = 'hive',
+        configurations = self.getConfig()['configurations']['hive-site'],
     )
     self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+        content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
+        owner = 'hive',
+        group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
-      content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
-      owner = 'hive',
-      group = 'hadoop',
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
+        owner = 'hive',
+        group = 'hadoop',
+    )
+    self.assertResourceCalled('Execute', 'hive mkdir -p /tmp/AMBARI-artifacts/ ; cp /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib//mysql-connector-java.jar',
+        creates = '/usr/lib/hive/lib//mysql-connector-java.jar',
+        path = ['/bin', '/usr/bin/'],
+        not_if = 'test -f /usr/lib/hive/lib//mysql-connector-java.jar',
+    )
+    self.assertResourceCalled('Execute', '/bin/sh -c \'cd /usr/lib/ambari-agent/ && curl -kf -x "" --retry 5 http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar -o DBConnectionVerification.jar\'',
+        environment = {'no_proxy': 'c6401.ambari.apache.org'},
+        not_if = '[ -f DBConnectionVerification.jar]',
     )
     self.assertResourceCalled('File', '/tmp/start_metastore_script',
-      content = StaticFile('startMetastore.sh'),
-      mode = 0755,
+        content = StaticFile('startMetastore.sh'),
+        mode = 0755,
     )
-    self.assertResourceCalled('Execute', "export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -initSchema -dbType mysql -userName hive -passWord asd",
-      not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord asd',
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -initSchema -dbType mysql -userName hive -passWord asd',
+        not_if = 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/lib/hive/bin/schematool -info -dbType mysql -userName hive -passWord asd',
     )
     self.assertResourceCalled('Directory', '/var/run/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/log/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
     )
     self.assertResourceCalled('Directory', '/var/lib/hive',
-      owner = 'hive',
-      group = 'hadoop',
-      mode = 0755,
-      recursive = True,
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-default.xml.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hive/conf/hive-env.sh.template',
-      owner = 'hive',
-      group = 'hadoop',
-    )
-
-
+        owner = 'hive',
+        group = 'hadoop',
+        mode = 0755,
+        recursive = True,
+    )

+ 132 - 0
ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py

@@ -0,0 +1,132 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import socket
+from unittest import TestCase
+
+class TestHDP21StackAdvisor(TestCase):
+
+  def setUp(self):
+    import imp
+    import os
+
+    testDirectory = os.path.dirname(os.path.abspath(__file__))
+    stackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/stack_advisor.py')
+    hdp206StackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/HDP/2.0.6/services/stack_advisor.py')
+    hdp21StackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/HDP/2.1/services/stack_advisor.py')
+    hdp21StackAdvisorClassName = 'HDP21StackAdvisor'
+    with open(stackAdvisorPath, 'rb') as fp:
+      imp.load_module('stack_advisor', fp, stackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    with open(hdp206StackAdvisorPath, 'rb') as fp:
+      imp.load_module('stack_advisor_impl', fp, hdp206StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    with open(hdp21StackAdvisorPath, 'rb') as fp:
+      stack_advisor_impl = imp.load_module('stack_advisor_impl', fp, hdp21StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    clazz = getattr(stack_advisor_impl, hdp21StackAdvisorClassName)
+    self.stackAdvisor = clazz()
+
+  def test_recommendOozieConfigurations_noFalconServer(self):
+    configurations = {}
+    clusterData = {
+      "components" : []
+    }
+    expected = {
+    }
+
+    self.stackAdvisor.recommendOozieConfigurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendOozieConfigurations_withFalconServer(self):
+    configurations = {}
+    clusterData = {
+      "components" : ["FALCON_SERVER"]
+    }
+    expected = {
+      "oozie-site": {
+        "properties": {
+          "oozie.services.ext": "org.apache.oozie.service.JMSAccessorService," +
+                                "org.apache.oozie.service.PartitionDependencyManagerService," +
+                                "org.apache.oozie.service.HCatAccessorService"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendOozieConfigurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendHiveConfigurations_mapMemoryLessThan2048(self):
+    configurations = {}
+    clusterData = {
+      "mapMemory": 567,
+      "reduceMemory": 2056,
+      "containers": 3,
+      "ramPerContainer": 1024
+    }
+    expected = {
+      "hive-site": {
+        "properties": {
+          "hive.auto.convert.join.noconditionaltask.size": "718274560",
+          "hive.tez.java.opts": "-server -Xmx1645m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC",
+          "hive.tez.container.size": "2056"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendHiveConfigurations_mapMemoryMoreThan2048(self):
+    configurations = {}
+    clusterData = {
+      "mapMemory": 3000,
+      "reduceMemory": 2056,
+      "containers": 3,
+      "ramPerContainer": 1024
+    }
+    expected = {
+      "hive-site": {
+        "properties": {
+          "hive.auto.convert.join.noconditionaltask.size": "1048576000",
+          "hive.tez.java.opts": "-server -Xmx2400m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC",
+          "hive.tez.container.size": "3000"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)
+
+  def test_recommendHiveConfigurations_containersRamIsLess(self):
+    configurations = {}
+    clusterData = {
+      "mapMemory": 3000,
+      "reduceMemory": 2056,
+      "containers": 3,
+      "ramPerContainer": 256
+    }
+    expected = {
+      "hive-site": {
+        "properties": {
+          "hive.auto.convert.join.noconditionaltask.size": "268435456",
+          "hive.tez.java.opts": "-server -Xmx614m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC",
+          "hive.tez.container.size": "768"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendHiveConfigurations(configurations, clusterData)
+    self.assertEquals(configurations, expected)

+ 2 - 1
ambari-web/app/app.js

@@ -87,7 +87,8 @@ module.exports = Em.Application.create({
    */
   isHaEnabled: function () {
     if (!this.get('isHadoop2Stack')) return false;
-    return !this.HostComponent.find().someProperty('componentName', 'SECONDARY_NAMENODE');
+    var isHDFSInstalled = App.Service.find().findProperty('serviceName','HDFS');
+    return !!isHDFSInstalled && !this.HostComponent.find().someProperty('componentName', 'SECONDARY_NAMENODE');
   }.property('router.clusterController.isLoaded', 'isHadoop2Stack'),
 
   /**

+ 2 - 4
ambari-web/app/assets/licenses/NOTICE.txt

@@ -10,7 +10,7 @@ This product includes Ember.js (http://emberjs.com - https://github.com/emberjs/
 Copyright (c) 2011, Yehuda Katz, Tom Dale, Charles Jolley and Ember.js contributors
 
 This product was generated using Brunch (https://github.com/brunch/brunch/blob/master/LICENSE)
-Copyright (c) 2011, Allan Berger, Jan Monschke, Martin Schürrer, Thomas Schranz, Nik Graf, Paul Miller
+Copyright (c) 2011, Allan Berger, Jan Monschke, Martin Sch\u00FCrrer, Thomas Schranz, Nik Graf, Paul Miller
 
 This product includes Twitter Bootstrap 2 (http://twitter.github.com/bootstrap/ - Apache License v2.0.)
 
@@ -30,7 +30,7 @@ Copyright (c) 2012, Michael Bostock.
 This product includes bootstrap-datepicker.js (http://www.eyecon.ro/bootstrap-datepicker - Apache License, Version 2.0)
 Copyright (c) 2012 Stefan Petre
 
-This product includes Font Awesome 3.2.1 (http://fortawesome.github.com/Font-Awesome - Creative Commons 3.0)
+This product includes Font Awesome 3.2.1 (http://fortawesome.github.com/Font-Awesome - SIL OFL 1.1)
 Copyright (c) 2013 Dave Gandy
 
 This product includes Rickshaw 1.1.2 (http://code.shutterstock.com/rickshaw/ - MIT License)
@@ -43,5 +43,3 @@ This product includes Spin.js (http://fgnass.github.com/spin.js/ - MIT license)
 Copyright (c) 2011 Felix Gnass [fgnass at neteye dot de]
 
 This product includes Moment.js (https://github.com/moment/moment/ - MIT license)
-
-This product includes iframeAutoHeight.js (http://github.com/house9/jquery-iframe-auto-height - MIT license)

+ 0 - 42
ambari-web/app/controllers/global/cluster_controller.js

@@ -78,7 +78,6 @@ App.ClusterController = Em.Controller.extend({
     'cluster': false,
     'clusterStatus': false,
     'racks': false,
-    'users': false,
     'componentConfigs': false,
     'componentsState': false
   }),
@@ -291,8 +290,6 @@ App.ClusterController = Em.Controller.extend({
       });
     }
 
-    this.loadUsersInfo();
-
     /**
      * Order of loading:
      * 1. request for service components supported by stack
@@ -386,45 +383,6 @@ App.ClusterController = Em.Controller.extend({
     console.warn('can\'t get ambari properties');
   },
 
-  /**
-   * Load info about users.
-   **/
-  loadUsersInfo: function() {
-    return App.ajax.send({
-      name: 'users.all',
-      sender: this,
-      success: 'loadUsersSuccess',
-      error: 'loadUsersError'
-    });
-  },
-
-  loadUsersSuccess: function(data) {
-    App.ajax.send({
-      name: 'users.privileges',
-      sender: this,
-      data: {
-        users: data
-      },
-      success: 'loadUsersPrivilegesSuccess'
-    });
-  },
-
-  loadUsersError: function() {
-    this.updateLoadStatus('users');
-  },
-  /**
-   * Load privileges, check relations between user and privilege,
-   * map users using <code>App.usersMappper</code>.
-   **/
-  loadUsersPrivilegesSuccess: function(data, opt, params) {
-    params.users.items.forEach(function(user) {
-      user.privileges = {};
-      user.privileges.items = data.items.filterProperty('PrivilegeInfo.principal_name', user.Users.user_name);
-    });
-    App.usersMapper.map(params.users);
-    this.updateLoadStatus('users');
-  },
-
   updateClusterData: function () {
     var testUrl = App.get('isHadoop2Stack') ? '/data/clusters/HDP2/cluster.json' : '/data/clusters/cluster.json';
     var clusterUrl = this.getUrl(testUrl, '?fields=Clusters');

Some files were not shown because too many files changed in this diff