Browse source code

Merge branch 'trunk' into branch-alerts-dev

Jonathan Hurley 10 years ago
parent
commit
b9cca71375
100 changed files with 1033 additions and 593 deletions
  1. + 36 - 0  ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsListCtrl.js
  2. + 18 - 0  ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/View.js
  3. + 17 - 1  ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
  4. + 3 - 3  ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html
  5. + 2 - 2  ambari-agent/src/main/python/ambari_agent/ActionQueue.py
  6. + 5 - 1  ambari-agent/src/main/python/ambari_agent/Controller.py
  7. + 1 - 1  ambari-server/conf/unix/ambari.properties
  8. + 12 - 0  ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
  9. + 81 - 28  ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
  10. + 1 - 1  ambari-server/src/main/python/ambari-server.py
  11. + 0 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
  12. + 4 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
  13. + 8 - 29  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
  14. + 0 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh
  15. + 3 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py
  16. + 102 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py
  17. + 58 - 0  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py
  18. + 0 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
  19. + 0 - 35  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
  20. + 0 - 1  ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
  21. + 1 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
  22. + 2 - 3  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
  23. + 5 - 23  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
  24. + 0 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/files/changeToSecureUid.sh
  25. + 2 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py
  26. + 106 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
  27. + 56 - 0  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
  28. + 0 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
  29. + 0 - 35  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
  30. + 0 - 1  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
  31. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
  32. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py
  33. + 2 - 7  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
  34. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
  35. + 4 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
  36. + 23 - 9  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
  37. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
  38. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
  39. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
  40. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
  41. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
  42. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
  43. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py
  44. + 0 - 0  ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml
  45. + 11 - 0  ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
  46. + 2 - 2  ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py
  47. + 32 - 0  ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
  48. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
  49. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/default.json
  50. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
  51. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json
  52. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
  53. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json
  54. + 1 - 1  ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json
  55. + 0 - 13  ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py
  56. + 119 - 0  ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py
  57. + 2 - 108  ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
  58. + 20 - 0  ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
  59. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json
  60. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/default.json
  61. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json
  62. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json
  63. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/flume_target.json
  64. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
  65. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json
  66. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
  67. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json
  68. + 1 - 1  ambari-server/src/test/python/stacks/2.0.6/configs/secured_no_jce_name.json
  69. + 0 - 13  ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
  70. + 119 - 0  ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
  71. + 2 - 108  ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
  72. + 1 - 1  ambari-server/src/test/python/stacks/2.1/configs/default.json
  73. + 1 - 1  ambari-server/src/test/python/stacks/2.1/configs/secured.json
  74. + 1 - 1  ambari-server/src/test/python/stacks/2.2/configs/default.json
  75. + 1 - 1  ambari-server/src/test/python/stacks/2.2/configs/secured.json
  76. + 2 - 0  ambari-web/app/app.js
  77. + 1 - 1  ambari-web/app/assets/data/requests/host_check/jdk_name.json
  78. + 2 - 2  ambari-web/app/controllers/application.js
  79. + 22 - 0  ambari-web/app/controllers/global/cluster_controller.js
  80. + 2 - 2  ambari-web/app/controllers/main.js
  81. + 0 - 24  ambari-web/app/controllers/main/host.js
  82. + 1 - 0  ambari-web/app/controllers/main/host/add_controller.js
  83. + 5 - 1  ambari-web/app/controllers/main/host/details.js
  84. + 5 - 5  ambari-web/app/controllers/main/service/info/configs.js
  85. + 0 - 4  ambari-web/app/controllers/main/views_controller.js
  86. + 4 - 4  ambari-web/app/controllers/wizard/step3_controller.js
  87. + 2 - 2  ambari-web/app/controllers/wizard/step7_controller.js
  88. + 1 - 1  ambari-web/app/controllers/wizard/step8_controller.js
  89. + 13 - 1  ambari-web/app/data/HDP2/site_properties.js
  90. + 1 - 0  ambari-web/app/mappers/service_config_version_mapper.js
  91. + 5 - 2  ambari-web/app/mappers/stack_service_mapper.js
  92. + 1 - 1  ambari-web/app/messages.js
  93. + 9 - 52  ambari-web/app/mixins/common/serverValidator.js
  94. + 1 - 1  ambari-web/app/models/repository.js
  95. + 19 - 4  ambari-web/app/models/service_config_version.js
  96. + 2 - 1  ambari-web/app/routes/installer.js
  97. + 2 - 1  ambari-web/app/routes/main.js
  98. + 3 - 3  ambari-web/app/templates/main/host/component_filter.hbs
  99. + 2 - 2  ambari-web/app/utils/ajax/ajax.js
  100. + 28 - 5  ambari-web/app/utils/components.js

+ 36 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsListCtrl.js

@@ -19,10 +19,46 @@
 
 angular.module('ambariAdminConsole')
 .controller('ViewsListCtrl',['$scope', 'View', '$modal', 'uiAlert', 'ConfirmationModal', function($scope, View, $modal, uiAlert, ConfirmationModal) {
+
+  var deferredList = [];
+  $scope.$on('$locationChangeStart', function() {
+    deferredList.forEach(function(def) {
+      def.reject();
+    })
+  });
+
+  function checkViewVersionStatus(view, versionObj, versionNumber){
+    var deferred = View.checkViewVersionStatus(view.view_name, versionNumber);
+    deferredList.push(deferred);
+
+    deferred.promise.then(function(status) {
+      deferredList.splice(deferredList.indexOf(deferred), 1);
+      if (status !== 'DEPLOYED' && status !== 'ERROR') {
+        checkViewVersionStatus(view, versionObj, versionNumber);
+      } else {
+        $scope.$evalAsync(function() {
+          versionObj.status = status;
+          angular.forEach(view.versions, function(version) {
+            if(version.status === 'DEPLOYED'){
+              view.canCreateInstance = true;
+            }
+          })
+        });
+      }
+    });
+  }
+
   function loadViews(){
     View.all().then(function(views) {
       $scope.views = views;
       $scope.getFilteredViews();
+      angular.forEach(views, function(view) {
+        angular.forEach(view.versions, function(versionObj, versionNumber) {
+          if (versionObj.status !== 'DEPLOYED' && versionObj.status !== 'ERROR'){
+            checkViewVersionStatus(view, versionObj, versionNumber);
+          }
+        });
+      })
     }).catch(function(data) {
       uiAlert.danger(data.data.status, data.data.message);
     });
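
The controller keeps re-issuing the status request until each view version reaches a terminal state (DEPLOYED or ERROR), and rejects any in-flight deferreds when the route changes. A minimal Python sketch of the same poll-until-terminal idea, where fetch_status is a hypothetical zero-argument callable standing in for the View.checkViewVersionStatus request:

import time

TERMINAL_STATES = ('DEPLOYED', 'ERROR')

def poll_version_status(fetch_status, interval=1.0, max_attempts=30):
    # fetch_status returns the current ViewVersionInfo/status string;
    # keep asking until the version is fully deployed or has failed.
    for _ in range(max_attempts):
        status = fetch_status()
        if status in TERMINAL_STATES:
            return status
        time.sleep(interval)  # the Angular code simply re-fires on resolve
    raise RuntimeError('view version never reached a terminal state')

print(poll_version_status(lambda: 'DEPLOYED'))  # -> DEPLOYED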

+ 18 - 0
ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/View.js

@@ -233,6 +233,24 @@ angular.module('ambariAdminConsole')
     });
   };
 
+  View.checkViewVersionStatus = function(view_name, version) {
+    var deferred = $q.defer();
+
+    $http({
+      method: 'GET',
+      url: Settings.baseUrl + '/views/' + view_name + '/versions/' + version,
+      params:{
+        'fields': 'ViewVersionInfo/status'
+      }
+    }).then(function(data) {
+      deferred.resolve(data.data.ViewVersionInfo.status);
+    }).catch(function(err) {
+      deferred.reject(err);
+    });
+
+    return deferred;
+  };
+
   View.all = function() {
     var deferred = $q.defer();
     var fields = [

+ 17 - 1
ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css

@@ -24,7 +24,7 @@
 .editable-list-container.well{
   padding: 10px;
   position: relative;
-  margin-bottom: 30px;
+  margin-bottom: 25px;
   cursor: pointer;
 }
 .editable-list-container.well.edit-mode{
@@ -487,6 +487,13 @@
   padding-top: 8px;
   padding-bottom: 8px;
 }
+.left-navbar .panel-body li .noclusters{
+  color: #808080;
+  padding-left: 33px;
+  padding-top: 8px;
+  padding-bottom: 8px;
+  margin: 0px;
+}
 .left-navbar .panel-body li.active a{
   background: #666;
 }
@@ -636,6 +643,15 @@ table.no-border tr td{
   min-height: 63px;
 }
 
+.views-permissions-panel .panel-body{
+  padding-bottom: 0;
+}
+
+.views-permissions-panel .panel-body table{
+  margin-bottom: 0;
+}
+
+
 .views-list-pane accordion .panel-group .panel-heading{
   cursor: pointer;
 }

+ 3 - 3
ambari-admin/src/main/resources/ui/admin-web/app/views/leftNavbar.html

@@ -63,9 +63,9 @@
       </div>
         
       <div ng-hide="cluster">
-        <a href="/#/installer/step0" class="btn btn-primary btn-block createcluster-btn">
-          Launch Install Wizard
-        </a>
+        <ul class="nav nav-pills nav-stacked">
+          <li><p class="noclusters">No clusters</p></li>
+        </ul>
       </div>
     </div>
   </div>

+ 2 - 2
ambari-agent/src/main/python/ambari_agent/ActionQueue.py

@@ -137,8 +137,8 @@ class ActionQueue(threading.Thread):
                       " of cluster " +  queued_command['clusterName'] + \
                       " to the queue.")
 
-    # Kill if in progress
-    self.customServiceOrchestrator.cancel_command(task_id, reason)
+      # Kill if in progress
+      self.customServiceOrchestrator.cancel_command(task_id, reason)
 
   def run(self):
     while not self.stopped():

+ 5 - 1
ambari-agent/src/main/python/ambari_agent/Controller.py

@@ -162,7 +162,11 @@ class Controller(threading.Thread):
   def cancelCommandInQueue(self, commands):
     """ Remove from the queue commands, kill the process if it's in progress """
     if commands:
-      self.actionQueue.cancel(commands)
+      try:
+        self.actionQueue.cancel(commands)
+      except Exception, err:
+        logger.error("Exception occurred on commands cancel: %s", err.message)
+        pass
     pass
 
   def addToQueue(self, commands):

+ 1 - 1
ambari-server/conf/unix/ambari.properties

@@ -21,7 +21,7 @@ resources.dir = /var/lib/ambari-server/resources
 custom.action.definitions = /var/lib/ambari-server/resources/custom_action_definitions
 jdk1.6.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jdk-6u31-linux-x64.bin
 jce_policy1.6.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jce_policy-6.zip
-jdk1.7.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jdk-7u45-linux-x64.tar.gz
+jdk1.7.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jdk-7u67-linux-x64.tar.gz
 jce_policy1.7.url=http://public-repo-1.hortonworks.com/ARTIFACTS/UnlimitedJCEPolicyJDK7.zip
 metadata.path=/var/lib/ambari-server/resources/stacks
 server.version.file=/var/lib/ambari-server/resources/version

+ 12 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java

@@ -21,11 +21,13 @@ package org.apache.ambari.server.controller;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.USER_LIST;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -65,6 +67,7 @@ import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.PropertyInfo.PropertyType;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
 import org.apache.ambari.server.utils.StageUtils;
@@ -296,6 +299,15 @@ public class AmbariCustomCommandExecutionHelper {
       // Set parameters required for re-installing clients on restart
       hostLevelParams.put(REPO_INFO, getRepoInfo
         (cluster, host));
+      
+      Set<String> userSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.USER, cluster);
+      String userList = gson.toJson(userSet);
+      hostLevelParams.put(USER_LIST, userList);
+      
+      Set<String> groupSet = configHelper.getPropertyValuesWithPropertyType(stackId, PropertyType.GROUP, cluster);
+      String groupList = gson.toJson(groupSet);
+      hostLevelParams.put(GROUP_LIST, groupList);
+      
       execCmd.setHostLevelParams(hostLevelParams);
 
       Map<String, String> commandParams = new TreeMap<String, String>();
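
The two new host-level parameters carry JSON arrays produced by gson.toJson; the agent-side stack scripts decode them with json.loads (see the before-ANY params.py hunks below). A small sketch of that round trip from the agent's point of view, with sample user and group names:

import json

# hostLevelParams as the agent receives it; USER_LIST and GROUP_LIST hold
# the JSON arrays that gson.toJson(...) produced on the server (sample values).
host_level_params = {
    'user_list': '["hdfs", "yarn", "ambari-qa"]',
    'group_list': '["hadoop", "users"]',
}

user_list = json.loads(host_level_params['user_list'])    # ['hdfs', 'yarn', 'ambari-qa']
group_list = json.loads(host_level_params['group_list'])  # ['hadoop', 'users']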

+ 81 - 28
ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java

@@ -80,6 +80,17 @@ public class AmbariLdapDataPopulator {
    */
   private LdapTemplate ldapTemplate;
 
+  // Constants
+  private static final String UID_ATTRIBUTE          = "uid";
+  private static final String DN_ATTRIBUTE           = "dn";
+  private static final String OBJECT_CLASS_ATTRIBUTE = "objectClass";
+
+  /**
+   * Construct an AmbariLdapDataPopulator.
+   *
+   * @param configuration  the Ambari configuration
+   * @param users          utility that provides access to Users
+   */
   @Inject
   public AmbariLdapDataPopulator(Configuration configuration, Users users) {
     this.configuration = configuration;
@@ -134,8 +145,9 @@ public class AmbariLdapDataPopulator {
     final Map<String, User> internalUsersMap = getInternalUsers();
     final Set<LdapUserDto> externalUsers = getExternalLdapUserInfo();
     for (LdapUserDto externalUser : externalUsers) {
-      if (internalUsersMap.containsKey(externalUser)
-          && internalUsersMap.get(externalUser).isLdapUser()) {
+      String userName = externalUser.getUserName();
+      if (internalUsersMap.containsKey(userName)
+          && internalUsersMap.get(userName).isLdapUser()) {
         externalUser.setSynced(true);
       } else {
         externalUser.setSynced(false);
@@ -251,7 +263,7 @@ public class AmbariLdapDataPopulator {
   }
 
   /**
-   * Performs synchronization of given set of usernames.
+   * Performs synchronization of given set of user names.
    *
    * @param users set of users to synchronize
    * @throws AmbariException if synchronization failed for any reason
@@ -376,30 +388,43 @@ public class AmbariLdapDataPopulator {
     }
   }
 
+  /**
+   * Get the set of LDAP groups for the given group name.
+   *
+   * @param groupName  the group name
+   *
+   * @return the set of LDAP groups for the given name
+   */
   protected Set<LdapGroupDto> getLdapGroups(String groupName) {
-    Filter groupObjectFilter = new EqualsFilter("objectClass",
+    Filter groupObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE,
         ldapServerProperties.getGroupObjectClass());
     Filter groupNameFilter = new LikeFilter(ldapServerProperties.getGroupNamingAttr(), groupName);
-    Set<LdapGroupDto> filteredLdapGroups = getFilteredLdapGroups(groupObjectFilter, groupNameFilter);
-    return filteredLdapGroups;
+    return getFilteredLdapGroups(groupObjectFilter, groupNameFilter);
   }
 
+  /**
+   * Get the set of LDAP users for the given user name.
+   *
+   * @param username  the user name
+   *
+   * @return the set of LDAP users for the given name
+   */
   protected Set<LdapUserDto> getLdapUsers(String username) {
-    Filter userObjectFilter = new EqualsFilter("objectClass", ldapServerProperties.getUserObjectClass());
+    Filter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass());
     Filter userNameFilter = new LikeFilter(ldapServerProperties.getUsernameAttribute(), username);
-    Set<LdapUserDto> filteredLdapUsers = getFilteredLdapUsers(userObjectFilter, userNameFilter);
-    return filteredLdapUsers;
+    return getFilteredLdapUsers(userObjectFilter, userNameFilter);
   }
 
+  /**
+   * Get the LDAP member for the given member attribute.
+   *
+   * @param memberAttribute  the member attribute
+   *
+   * @return the user for the given member attribute; null if not found
+   */
   protected LdapUserDto getLdapUserByMemberAttr(String memberAttribute) {
-    // memberAttribute may be either DN or UID, check both
-    Filter userObjectFilter = new EqualsFilter("objectClass", ldapServerProperties.getUserObjectClass());
-    Filter dnFilter = new EqualsFilter("dn", memberAttribute);
-    Filter uidFilter = new EqualsFilter("uid", memberAttribute);
-    OrFilter orFilter = new OrFilter();
-    orFilter.or(dnFilter);
-    orFilter.or(uidFilter);
-    Set<LdapUserDto> filteredLdapUsers = getFilteredLdapUsers(userObjectFilter, orFilter);
+    Filter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, ldapServerProperties.getUserObjectClass());
+    Set<LdapUserDto> filteredLdapUsers = getFilteredLdapUsers(userObjectFilter, getMemberFilter(memberAttribute));
     return (filteredLdapUsers.isEmpty()) ? null : filteredLdapUsers.iterator().next();
   }
 
@@ -425,11 +450,38 @@ public class AmbariLdapDataPopulator {
    * @return set of info about LDAP groups
    */
   protected Set<LdapGroupDto> getExternalLdapGroupInfo() {
-    EqualsFilter groupObjectFilter = new EqualsFilter("objectClass",
+    EqualsFilter groupObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE,
         ldapServerProperties.getGroupObjectClass());
     return getFilteredLdapGroups(groupObjectFilter);
   }
 
+  // get a filter based on the given member attribute
+  private Filter getMemberFilter(String memberAttribute) {
+
+    String   usernameAttribute = ldapServerProperties.getUsernameAttribute();
+    OrFilter memberFilter      = null;
+
+    String[] filters = memberAttribute.split(",");
+    for (String filter : filters) {
+      String[] operands = filter.split("=");
+      if (operands.length == 2) {
+
+        String lOperand = operands[0];
+
+        if (lOperand.equals(usernameAttribute) || lOperand.equals(UID_ATTRIBUTE) || lOperand.equals(DN_ATTRIBUTE)) {
+          if (memberFilter == null) {
+            memberFilter = new OrFilter();
+          }
+          memberFilter.or(new EqualsFilter(lOperand, operands[1]));
+        }
+      }
+    }
+    return memberFilter == null ?
+        new OrFilter().or(new EqualsFilter(DN_ATTRIBUTE, memberAttribute)).
+            or(new EqualsFilter(UID_ATTRIBUTE, memberAttribute)) :
+        memberFilter;
+  }
+
   private Set<LdapGroupDto> getFilteredLdapGroups(Filter...filters) {
     AndFilter andFilter = new AndFilter();
     for (Filter filter : filters) {
@@ -450,16 +502,18 @@ public class AmbariLdapDataPopulator {
 
         final LdapGroupDto group = new LdapGroupDto();
         final String groupNameAttribute = adapter.getStringAttribute(ldapServerProperties.getGroupNamingAttr());
-        group.setGroupName(groupNameAttribute.toLowerCase());
 
-        final String[] uniqueMembers = adapter.getStringAttributes(ldapServerProperties.getGroupMembershipAttr());
-        if (uniqueMembers != null) {
-          for (String uniqueMember: uniqueMembers) {
-            group.getMemberAttributes().add(uniqueMember.toLowerCase());
+        if (groupNameAttribute != null) {
+          group.setGroupName(groupNameAttribute.toLowerCase());
+
+          final String[] uniqueMembers = adapter.getStringAttributes(ldapServerProperties.getGroupMembershipAttr());
+          if (uniqueMembers != null) {
+            for (String uniqueMember: uniqueMembers) {
+              group.getMemberAttributes().add(uniqueMember.toLowerCase());
+            }
           }
+          groups.add(group);
         }
-
-        groups.add(group);
         return null;
       }
     });
@@ -472,7 +526,7 @@ public class AmbariLdapDataPopulator {
    * @return set of info about LDAP users
    */
   protected Set<LdapUserDto> getExternalLdapUserInfo() {
-    EqualsFilter userObjectFilter = new EqualsFilter("objectClass",
+    EqualsFilter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE,
         ldapServerProperties.getUserObjectClass());
     return getFilteredLdapUsers(userObjectFilter);
   }
@@ -496,7 +550,7 @@ public class AmbariLdapDataPopulator {
         final LdapUserDto user = new LdapUserDto();
         final DirContextAdapter adapter  = (DirContextAdapter) ctx;
         final String usernameAttribute = adapter.getStringAttribute(ldapServerProperties.getUsernameAttribute());
-        final String uidAttribute = adapter.getStringAttribute("uid");
+        final String uidAttribute = adapter.getStringAttribute(UID_ATTRIBUTE);
         if (usernameAttribute != null && uidAttribute != null) {
           user.setUserName(usernameAttribute.toLowerCase());
           user.setUid(uidAttribute.toLowerCase());
@@ -590,5 +644,4 @@ public class AmbariLdapDataPopulator {
     }
     return ldapTemplate;
   }
-
 }
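
The new getMemberFilter accepts a member attribute that may be a full DN such as uid=jdoe,ou=people,dc=example,dc=com, a bare DN, or a bare UID. A rough Python sketch of the same parsing rule; the sAMAccountName default and the sample DN are illustrative only:

def member_filter_terms(member_attribute, username_attribute='sAMAccountName'):
    # Keep name=value pairs whose attribute is the configured username
    # attribute, 'uid', or 'dn'; if none qualify, fall back to matching the
    # whole string as either a DN or a UID (the pre-change behaviour).
    accepted = (username_attribute, 'uid', 'dn')
    terms = []
    for rdn in member_attribute.split(','):
        operands = rdn.split('=')
        if len(operands) == 2 and operands[0] in accepted:
            terms.append((operands[0], operands[1]))
    if not terms:
        terms = [('dn', member_attribute), ('uid', member_attribute)]
    return terms  # the Java code ORs these into a single LDAP filter

print(member_filter_terms('uid=jdoe,ou=people,dc=example,dc=com'))
# [('uid', 'jdoe')]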

+ 1 - 1
ambari-server/src/main/python/ambari-server.py

@@ -390,7 +390,7 @@ ORACLE_DB_ID_TYPES = ["Service Name", "SID"]
 
 
 # jdk commands
-JDK_NAMES = ["jdk-7u45-linux-x64.tar.gz", "jdk-6u31-linux-x64.bin"]
+JDK_NAMES = ["jdk-7u67-linux-x64.tar.gz", "jdk-6u31-linux-x64.bin"]
 JDK_URL_PROPERTIES = ["jdk1.7.url", "jdk1.6.url"]
 JCE_URL_PROPERTIES = ["jce_policy1.7.url", "jce_policy1.6.url"]
 DEFAULT_JDK16_LOCATION = "/usr/jdk64/jdk1.6.0_31"

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py

@@ -28,7 +28,6 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_hadoop_env()
     setup_config()
 
 if __name__ == "__main__":

+ 4 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py

@@ -59,4 +59,7 @@ hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
 #users and groups
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
+user_group = config['configurations']['cluster-env']['user_group']
+
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+has_namenode = not len(namenode_host) == 0

+ 8 - 29
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -19,34 +19,13 @@ limitations under the License.
 import os
 from resource_management import *
 
-def setup_hadoop_env():
-  import params
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-    
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template)
-  )
-
 def setup_config():
   import params
-  XmlConfig("core-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['core-site'],
-            configuration_attributes=params.config['configuration_attributes']['core-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
+  if params.has_namenode:
+    XmlConfig("core-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['core-site'],
+              configuration_attributes=params.config['configuration_attributes']['core-site'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/files/changeToSecureUid.sh → ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/files/changeToSecureUid.sh


+ 3 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/hook.py

@@ -27,6 +27,9 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
     
     setup_jce()
+    setup_users()
+    setup_hadoop_env()
+
 
 if __name__ == "__main__":
   BeforeAnyHook().execute()

+ 102 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/params.py

@@ -18,6 +18,8 @@ limitations under the License.
 """
 
 from resource_management import *
+import collections
+import json
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -31,3 +33,103 @@ jdk_name = default("/hostLevelParams/jdk_name", None)
 java_home = config['hostLevelParams']['java_home']
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_hdp_root = '/usr/hdp/current'
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+  jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+
+hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+
+has_namenode = not len(namenode_host) == 0
+has_nagios = not len(hagios_server_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_tez = 'tez-site' in config['configurations']
+has_hbase_masters = not len(hbase_master_hosts) == 0
+
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+#users and groups
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+nagios_user = config['configurations']['nagios-env']['nagios_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])

+ 58 - 0
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-ANY/scripts/shared_initialization.py

@@ -56,3 +56,61 @@ def setup_jce():
             cwd  = security_dir,
             path = ['/bin/','/usr/bin']
     )
+    
+
+def setup_users():
+  """
+  Creates users before cluster installation
+  """
+  import params
+  
+  for group in params.group_list:
+    Group(group,
+        ignore_failures = params.ignore_groupsusers_create
+    )
+    
+  for user in params.user_list:
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create       
+    )
+           
+  set_uid(params.smoke_user, params.smoke_user_dirs)
+
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
+    
+def set_uid(user, user_dirs):
+  """
+  user_dirs - comma separated directories
+  """
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
+       content=StaticFile("changeToSecureUid.sh"),
+       mode=0555)
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+          not_if = format("test $(id -u {user}) -gt 1000"))
+    
+def setup_hadoop_env():
+  import params
+  if params.has_namenode:
+    if params.security_enabled:
+      tc_owner = "root"
+    else:
+      tc_owner = params.hdfs_user
+    Directory(params.hadoop_conf_empty_dir,
+              recursive=True,
+              owner='root',
+              group='root'
+    )
+    Link(params.hadoop_conf_dir,
+         to=params.hadoop_conf_empty_dir,
+         not_if=format("ls {hadoop_conf_dir}")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+         owner=tc_owner,
+         content=InlineTemplate(params.hadoop_env_sh_template)
+    )
+

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py

@@ -32,7 +32,6 @@ class BeforeInstallHook(Hook):
     
     install_repos()
     setup_java()
-    setup_users()
     install_packages()
 
 if __name__ == "__main__":

+ 0 - 35
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -21,41 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_users():
-  """
-  Creates users before cluster installation
-  """
-  import params
-  
-  for group in params.group_list:
-    Group(group,
-        ignore_failures = params.ignore_groupsusers_create
-    )
-    
-  for user in params.user_list: 
-    User(user,
-        gid = params.user_to_gid_dict[user],
-        groups = params.user_to_groups_dict[user],
-        ignore_failures = params.ignore_groupsusers_create        
-    )
-  
-  set_uid(params.smoke_user, params.smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    set_uid(params.hbase_user, params.hbase_user_dirs)
-
-def set_uid(user, user_dirs):
-  """
-  user_dirs - comma separated directories
-  """
-  import params
-
-  File(format("{tmp_dir}/changeUid.sh"),
-       content=StaticFile("changeToSecureUid.sh"),
-       mode=0555)
-  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
-          not_if = format("test $(id -u {user}) -gt 1000"))
-
 def setup_java():
   """
   Installs jdk using specific params, that comes from ambari-server

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py

@@ -27,7 +27,6 @@ class BeforeStartHook(Hook):
     import params
 
     self.run_custom_hook('before-ANY')
-    self.run_custom_hook('after-INSTALL')
     env.set_params(params)
     
     setup_hadoop()

+ 1 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py

@@ -28,7 +28,7 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_hadoop_env()
+    setup_hdp_install_directory()
     setup_config()
 
 if __name__ == "__main__":

+ 2 - 3
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py

@@ -23,10 +23,10 @@ from resource_management.core.system import System
 config = Script.get_config()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
 else:
@@ -44,7 +44,6 @@ java_home = config['hostLevelParams']['java_home']
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
-hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
 
 #hadoop-env.sh
 java_home = config['hostLevelParams']['java_home']

+ 5 - 23
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py

@@ -19,31 +19,13 @@ limitations under the License.
 import os
 from resource_management import *
 
-def setup_hadoop_env():
+def setup_hdp_install_directory():
   import params
-  if params.has_namenode:
-    if params.security_enabled:
-      tc_owner = "root"
-    else:
-      tc_owner = params.hdfs_user
-    Directory(params.hadoop_conf_empty_dir,
-              recursive=True,
-              owner='root',
-              group='root'
-    )
-    Link(params.hadoop_conf_dir,
-         to=params.hadoop_conf_empty_dir,
-         not_if=format("ls {hadoop_conf_dir}")
-    )
-    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-         owner=tc_owner,
-         content=InlineTemplate(params.hadoop_env_sh_template)
+  if params.rpm_version:
+    Execute(format('ln -s /usr/hdp/{rpm_version}-* {versioned_hdp_root}'),
+            not_if=format('ls {versioned_hdp_root}'),
+            only_if=format('ls -d /usr/hdp/{rpm_version}-*')
     )
-    if params.rpm_version is not None:
-      Execute(format('ln -s /usr/hdp/{rpm_version}-* {versioned_hdp_root}'),
-              not_if=format('ls {versioned_hdp_root}'),
-              only_if=format('ls -d /usr/hdp/{rpm_version}-*')
-      )
 
 def setup_config():
   import params
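
setup_hdp_install_directory links /usr/hdp/current to the versioned install directory only when rpm_version is set, a matching directory exists (only_if), and the link is not already present (not_if). The same guard logic, sketched in plain Python instead of the resource_management Execute resource:

import glob
import os

def setup_hdp_install_directory(rpm_version, versioned_hdp_root='/usr/hdp/current'):
    if not rpm_version:
        return
    matches = glob.glob('/usr/hdp/%s-*' % rpm_version)       # only_if guard
    if matches and not os.path.exists(versioned_hdp_root):   # not_if guard
        os.symlink(matches[0], versioned_hdp_root)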

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/files/changeToSecureUid.sh → ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/files/changeToSecureUid.sh


+ 2 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/hook.py

@@ -27,6 +27,8 @@ class BeforeAnyHook(Hook):
     env.set_params(params)
     
     setup_jce()
+    setup_users()
+    setup_hadoop_env()
 
 if __name__ == "__main__":
   BeforeAnyHook().execute()

+ 106 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py

@@ -18,12 +18,12 @@ limitations under the License.
 """
 
 from resource_management import *
+import collections
+import json
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
 artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
 jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
 jce_location = config['hostLevelParams']['jdk_location']
@@ -31,3 +31,107 @@ jdk_name = default("/hostLevelParams/jdk_name", None)
 java_home = config['hostLevelParams']['java_home']
 
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_hdp_root = '/usr/hdp/current'
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+  jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+nagios_user = config['configurations']['nagios-env']['nagios_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+tez_user = config['configurations']['tez-env']["tez_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group =  default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+hagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+
+has_namenode = not len(namenode_host) == 0
+has_nagios = not len(hagios_server_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_tez = 'tez-site' in config['configurations']
+has_hbase_masters = not len(hbase_master_hosts) == 0
+
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])

+ 56 - 0
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py

@@ -56,3 +56,59 @@ def setup_jce():
             cwd  = security_dir,
             path = ['/bin/','/usr/bin']
     )
+
+def setup_users():
+  """
+  Creates users before cluster installation
+  """
+  import params
+  
+  for group in params.group_list:
+    Group(group,
+        ignore_failures = params.ignore_groupsusers_create
+    )
+    
+  for user in params.user_list:
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create       
+    )
+           
+  set_uid(params.smoke_user, params.smoke_user_dirs)
+
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
+    
+def set_uid(user, user_dirs):
+  """
+  user_dirs - comma separated directories
+  """
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
+       content=StaticFile("changeToSecureUid.sh"),
+       mode=0555)
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+          not_if = format("test $(id -u {user}) -gt 1000"))
+    
+def setup_hadoop_env():
+  import params
+  if params.has_namenode:
+    if params.security_enabled:
+      tc_owner = "root"
+    else:
+      tc_owner = params.hdfs_user
+    Directory(params.hadoop_conf_empty_dir,
+              recursive=True,
+              owner='root',
+              group='root'
+    )
+    Link(params.hadoop_conf_dir,
+         to=params.hadoop_conf_empty_dir,
+         not_if=format("ls {hadoop_conf_dir}")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+         owner=tc_owner,
+         content=InlineTemplate(params.hadoop_env_sh_template)
+    )

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py

@@ -33,7 +33,6 @@ class BeforeInstallHook(Hook):
     install_repos()
     install_packages()
     setup_java()
-    setup_users()
 
 if __name__ == "__main__":
   BeforeInstallHook().execute()

+ 0 - 35
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py

@@ -21,41 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_users():
-  """
-  Creates users before cluster installation
-  """
-  import params
-  
-  for group in params.group_list:
-    Group(group,
-        ignore_failures = params.ignore_groupsusers_create
-    )
-    
-  for user in params.user_list:
-    User(user,
-        gid = params.user_to_gid_dict[user],
-        groups = params.user_to_groups_dict[user],
-        ignore_failures = params.ignore_groupsusers_create       
-    )
-           
-  set_uid(params.smoke_user, params.smoke_user_dirs)
-
-  if params.has_hbase_masters:
-    set_uid(params.hbase_user, params.hbase_user_dirs)
-
-def set_uid(user, user_dirs):
-  """
-  user_dirs - comma separated directories
-  """
-  import params
-
-  File(format("{tmp_dir}/changeUid.sh"),
-       content=StaticFile("changeToSecureUid.sh"),
-       mode=0555)
-  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
-          not_if = format("test $(id -u {user}) -gt 1000"))
-  
 def setup_java():
   """
   Installs jdk using specific params, that comes from ambari-server

+ 0 - 1
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py

@@ -27,7 +27,6 @@ class BeforeStartHook(Hook):
     import params
 
     self.run_custom_hook('before-ANY')
-    self.run_custom_hook('after-INSTALL')
     env.set_params(params)
 
     setup_hadoop()

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py

@@ -24,10 +24,10 @@ import os
 config = Script.get_config()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
   hadoop_lib_home = "/usr/hdp/current/hadoop/lib"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/package/scripts/params.py

@@ -27,10 +27,10 @@ proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 security_enabled = False
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   flume_bin = '/usr/hdp/current/flume/bin/flume-ng'
 else:
   flume_bin = '/usr/bin/flume-ng'

+ 2 - 7
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py

@@ -27,15 +27,10 @@ config = Script.get_config()
 exec_tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
-#RPM versioning support
-  rpm_version = default("/configurations/hadoop-env/rpm_version", None)
-
-#hadoop params
-if rpm_version is not None:
+if rpm_version:
   hadoop_bin_dir = format("/usr/hdp/current/hadoop/bin")
   daemon_script = format('/usr/hdp/current/hbase/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase/bin/region_mover.rb')

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py

@@ -25,10 +25,10 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop/sbin"

+ 4 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py

@@ -27,12 +27,12 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 hdp_stack_version = config['hostLevelParams']['stack_version']
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
   hadoop_home = '/usr/hdp/current/hadoop'
   hadoop_streeming_jars = "/usr/hdp/current/hadoop-mapreduce/hadoop-streaming-*.jar"
@@ -43,6 +43,7 @@ if rpm_version is not None:
   tez_tar_file = "/usr/hdp/current/tez/lib/tez*.tar.gz"
   pig_tar_file = '/usr/hdp/current/pig/pig.tar.gz'
   hive_tar_file = '/usr/hdp/current/hive/hive.tar.gz'
+  sqoop_tar_file = '/usr/hdp/current/sqoop/sqoop*.tar.gz'
 
   hcat_lib = '/usr/hdp/current/hive/hive-hcatalog/share/hcatalog'
   webhcat_bin_dir = '/usr/hdp/current/hive-hcatalog/sbin'
@@ -58,6 +59,7 @@ else:
   tez_tar_file = "/usr/lib/tez/tez*.tar.gz"
   pig_tar_file = '/usr/share/HDP-webhcat/pig.tar.gz'
   hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
+  sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
 
   if str(hdp_stack_version).startswith('2.0'):
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'

+ 23 - 9
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py

@@ -20,6 +20,8 @@ Ambari Agent
 """
 from resource_management import *
 import sys
+import os.path
+import glob
 
 
 def webhcat():
@@ -95,15 +97,16 @@ def webhcat():
                 hadoop_conf_dir=params.hadoop_conf_dir
   )
 
-  CopyFromLocal(params.pig_tar_file,
-                owner=params.webhcat_user,
-                mode=0755,
-                dest_dir=params.webhcat_apps_dir,
-                kinnit_if_needed=kinit_if_needed,
-                hdfs_user=params.hdfs_user,
-                hadoop_bin_dir=params.hadoop_bin_dir,
-                hadoop_conf_dir=params.hadoop_conf_dir
-  )
+  if (os.path.isfile(params.pig_tar_file)):
+    CopyFromLocal(params.pig_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
 
   CopyFromLocal(params.hive_tar_file,
                 owner=params.webhcat_user,
@@ -114,3 +117,14 @@ def webhcat():
                 hadoop_bin_dir=params.hadoop_bin_dir,
                 hadoop_conf_dir=params.hadoop_conf_dir
   )
+
+  if (len(glob.glob(params.sqoop_tar_file)) > 0):
+    CopyFromLocal(params.sqoop_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
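
pig_tar_file is a concrete path, so its guard can use os.path.isfile; sqoop_tar_file contains a * wildcard, so its guard needs glob.glob. A quick illustration:

import glob
import os.path

wildcard = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
print(os.path.isfile(wildcard))   # False: the '*' is taken literally
print(bool(glob.glob(wildcard)))  # True only if a matching tarball exists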

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py

@@ -27,10 +27,10 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
   hadoop_lib_home = "/usr/hdp/current/hadoop/lib"
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce/*"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py

@@ -26,10 +26,10 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
   hadoop_home = '/usr/hdp/current/hadoop'
   pig_bin_dir = '/usr/hdp/current/pig/bin'

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py

@@ -22,10 +22,10 @@ from resource_management import *
 config = Script.get_config()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   sqoop_conf_dir = '/usr/hdp/current/etc/sqoop/conf'
   sqoop_lib = '/usr/hdp/current/sqoop/lib'
   hbase_home = '/usr/hdp/current/hbase'

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py

@@ -28,10 +28,10 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   hadoop_libexec_dir = "/usr/hdp/current/hadoop/libexec"
   hadoop_bin = "/usr/hdp/current/hadoop/sbin"
   hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py

@@ -27,10 +27,10 @@ config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   zk_bin = '/usr/hdp/current/zookeeper/bin'
   smoke_script = '/usr/hdp/current/zookeeper/bin/zkCli.sh'
 else:

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py

@@ -24,10 +24,10 @@ from status_params import *
 config = Script.get_config()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   hadoop_bin_dir = "/usr/hdp/current/hadoop/bin"
   falcon_webapp_dir = "/usr/hdp/current/falcon/webapp"
   falcon_home = "/usr/hdp/current/falcon"

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/package/scripts/params.py

@@ -25,10 +25,10 @@ import status_params
 config = Script.get_config()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
   rest_lib_dir = '/usr/hdp/current/storm/contrib/storm-rest'
   storm_bin_dir = "/usr/hdp/current/storm/bin"
 else:

+ 0 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml → ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml


+ 11 - 0
ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml

@@ -49,5 +49,16 @@ limitations under the License.
     <description>The path to the hcatalog executable.</description>
   </property>
 
+  <property>
+    <name>templeton.sqoop.archive</name>
+    <value>hdfs:///apps/webhcat/sqoop.tar.gz</value>
+    <description>The path to the Sqoop archive in HDFS.</description>
+  </property>
+
+  <property>
+    <name>templeton.sqoop.path</name>
+    <value>sqoop.tar.gz/sqoop/bin/sqoop</value>
+    <description>The path to the Sqoop executable.</description>
+  </property>
 
 </configuration>
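
These two properties tell WebHCat where the Sqoop archive lives in HDFS and which binary inside the unpacked archive to run. A hedged sketch of a client exercising them, assuming WebHCat's default port 50111 and a /templeton/v1/sqoop endpoint, neither of which appears in this diff; Python 2 urllib matches the codebase era:

import urllib
import urllib2

params = urllib.urlencode({
    'user.name': 'hcat',
    'command': 'import --connect jdbc:mysql://db/test --table t',
})
response = urllib2.urlopen(
    'http://webhcat-host:50111/templeton/v1/sqoop', params)
print(response.read())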

+ 2 - 2
ambari-server/src/main/resources/stacks/HDP/2.2/services/SLIDER/package/scripts/params.py

@@ -24,10 +24,10 @@ from resource_management import *
 config = Script.get_config()
 
 #RPM versioning support
-rpm_version = default("/configurations/hadoop-env/rpm_version", None)
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
 
 #hadoop params
-if rpm_version is not None:
+if rpm_version:
 #  slider_conf_dir = '/usr/lib/current/slider/conf'
 #  slider_bin_dir = '/usr/lib/current/slider/bin'
   slider_conf_dir = "/usr/lib/slider/conf"

+ 32 - 0
ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java

@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -43,6 +44,7 @@ import org.easymock.EasyMock;
 import org.easymock.IAnswer;
 import org.junit.Test;
 import org.springframework.ldap.core.AttributesMapper;
+import org.springframework.ldap.core.ContextMapper;
 import org.springframework.ldap.core.LdapTemplate;
 
 import static junit.framework.Assert.*;
@@ -1402,6 +1404,36 @@ public class AmbariLdapDataPopulatorTest {
     verify(users);
   }
 
+  @Test
+  public void testGetLdapUserByMemberAttr() throws Exception {
+
+    Configuration configuration = createNiceMock(Configuration.class);
+    Users users = createNiceMock(Users.class);
+    LdapTemplate ldapTemplate = createNiceMock(LdapTemplate.class);
+    LdapServerProperties ldapServerProperties = createNiceMock(LdapServerProperties.class);
+    Capture<ContextMapper> contextMapperCapture = new Capture<ContextMapper>();
+
+    List list = new LinkedList();
+
+    expect(configuration.getLdapServerProperties()).andReturn(ldapServerProperties).anyTimes();
+    expect(ldapServerProperties.getUserObjectClass()).andReturn("objectClass").anyTimes();
+    expect(ldapServerProperties.getBaseDN()).andReturn("baseDN").anyTimes();
+
+    expect(ldapTemplate.search(eq("baseDN"), eq("(&(objectClass=objectClass)(|(dn=foo)(uid=foo)))"), capture(contextMapperCapture))).andReturn(list);
+    expect(ldapTemplate.search(eq("baseDN"), eq("(&(objectClass=objectClass)(uid=foo))"), capture(contextMapperCapture))).andReturn(list);
+
+    replay(ldapTemplate, ldapServerProperties, users, configuration);
+
+    AmbariLdapDataPopulatorTestInstance populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
+
+    populator.setLdapTemplate(ldapTemplate);
+
+    populator.getLdapUserByMemberAttr("foo");
+    populator.getLdapUserByMemberAttr("uid=foo,dc=example,dc=com");
+
+    verify(ldapTemplate, ldapServerProperties, users, configuration);
+  }
+
   private static int userIdCounter = 1;
 
   private User createUser(String name, boolean ldapUser, GroupEntity group) {
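
The two expected search filters pin down the lookup strategy under test: a bare name is searched as dn-or-uid, while a DN-shaped value is reduced to its uid and searched by uid alone. The same logic rendered in Python for illustration; the DN-detection heuristic is an assumption, not the production Java code:

def member_attr_filter(object_class, member_attr):
    if '=' in member_attr and ',' in member_attr:
        # DN-shaped ("uid=foo,dc=example,dc=com"): search by uid alone
        uid = member_attr.split(',')[0].split('=')[1]
        return '(&(objectClass=%s)(uid=%s))' % (object_class, uid)
    # bare name: search by dn or uid
    return '(&(objectClass=%s)(|(dn=%s)(uid=%s)))' % (
        object_class, member_attr, member_attr)

assert member_attr_filter('objectClass', 'foo') == \
    '(&(objectClass=objectClass)(|(dn=foo)(uid=foo)))'
assert member_attr_filter('objectClass', 'uid=foo,dc=example,dc=com') == \
    '(&(objectClass=objectClass)(uid=foo))'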

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/default_client.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/secured.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/secured_client.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/1.3.2/configs/secured_no_jce_name.json

@@ -13,7 +13,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 0 - 13
ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py

@@ -30,19 +30,6 @@ class TestHookAfterInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to = '/etc/hadoop/conf.empty',
-                              not_if = 'ls /etc/hadoop/conf'
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 119 - 0
ambari-server/src/test/python/stacks/1.3.2/hooks/before-ANY/test_before_any.py

@@ -36,4 +36,123 @@ class TestHookBeforeInstall(RMFTestCase):
         ignore_failures = True,
         path = ['/bin', '/usr/bin/'],
     )
+    self.assertResourceCalled('Group', 'hadoop',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nobody',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'users',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nagios',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('User', 'hive',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'oozie',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'nobody',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'nobody'],
+    )
+    self.assertResourceCalled('User', 'nagios',
+        gid = 'nagios',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'ambari-qa',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'users'],
+    )
+    self.assertResourceCalled('User', 'flume',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hdfs',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'storm',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'mapred',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hbase',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'tez',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'zookeeper',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'falcon',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'yarn',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hcat',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
+        not_if = 'test $(id -u ambari-qa) -gt 1000',
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
+        not_if = 'test $(id -u hbase) -gt 1000',
+    )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+        owner = 'root',
+        group = 'root',
+        recursive = True,
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+        not_if = 'ls /etc/hadoop/conf',
+        to = '/etc/hadoop/conf.empty',
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+        owner = 'hdfs',
+    )
     self.assertNoMoreResources()
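
These assertions pin the provisioning work that moved into the before-ANY hook (and out of before-INSTALL, deleted below): groups first, then users with gid and supplementary groups, then the changeToSecureUid.sh fixups, then the conf.empty symlink setup that previously ran in after-INSTALL. A rough sketch of the loop the Group/User assertions imply; the param names and stub resources are illustrative, not the actual before-ANY params module:

def ensure_group(name):
    print('Group %s' % name)

def ensure_user(name, gid, groups):
    print('User %s gid=%s groups=%s' % (name, gid, groups))

def setup_users(group_list, user_to_gid_dict, user_to_groups_dict):
    for group in group_list:                  # groups before users
        ensure_group(group)
    for user in user_to_gid_dict:             # then every service user
        ensure_user(user, user_to_gid_dict[user],
                    user_to_groups_dict.get(user, []))

setup_users(['hadoop', 'nobody', 'users', 'nagios'],
            {'hdfs': 'hadoop', 'nagios': 'nagios', 'ambari-qa': 'hadoop'},
            {'hdfs': ['hadoop'], 'nagios': ['hadoop'], 'ambari-qa': ['users']})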

+ 2 - 108
ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py

@@ -38,120 +38,14 @@ class TestHookBeforeInstall(RMFTestCase):
         repo_file_name='HDP',
         repo_template='repo_suse_rhel.j2'
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/ ; curl -kf -x \"\" --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz',
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/ ; curl -kf -x \"\" --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u67-linux-x64.tar.gz -o /tmp/AMBARI-artifacts//jdk-7u67-linux-x64.tar.gz',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
         environment = {'no_proxy': 'c6401.ambari.apache.org'}
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/AMBARI-artifacts//jdk-7u67-linux-x64.tar.gz > /dev/null 2>&1',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
     )
-    self.assertResourceCalled('Group', 'hadoop',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nobody',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'users',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nagios',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('User', 'hive',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'oozie',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'nobody',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'nobody'],
-    )
-    self.assertResourceCalled('User', 'nagios',
-        gid = 'nagios',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'ambari-qa',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'users'],
-    )
-    self.assertResourceCalled('User', 'flume',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hdfs',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'storm',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'mapred',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hbase',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'tez',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'zookeeper',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'falcon',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'sqoop',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'yarn',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hcat',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
-        not_if = 'test $(id -u ambari-qa) -gt 1000',
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
-        not_if = 'test $(id -u hbase) -gt 1000',
-    )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertNoMoreResources()

+ 20 - 0
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py

@@ -20,6 +20,8 @@ limitations under the License.
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 
+@patch("os.path.isfile", new = MagicMock(return_value=True))
+@patch("glob.glob", new = MagicMock(return_value=["one", "two"]))
 class TestWebHCatServer(RMFTestCase):
 
   def test_configure_default(self):
@@ -183,6 +185,15 @@ class TestWebHCatServer(RMFTestCase):
                               hadoop_conf_dir='/etc/hadoop/conf',
                               hdfs_user='hdfs'
     )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='',
+                              hadoop_bin_dir='/usr/bin',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hdfs_user='hdfs'
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
@@ -275,4 +286,13 @@ class TestWebHCatServer(RMFTestCase):
                               hadoop_conf_dir='/etc/hadoop/conf',
                               hadoop_bin_dir='/usr/bin',
                               hdfs_user='hdfs'
+    )
+    self.assertResourceCalled('CopyFromLocal', '/usr/share/HDP-webhcat/sqoop*.tar.gz',
+                              owner='hcat',
+                              mode=0755,
+                              dest_dir='/apps/webhcat',
+                              kinnit_if_needed='/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs;',
+                              hadoop_conf_dir='/etc/hadoop/conf',
+                              hadoop_bin_dir='/usr/bin',
+                              hdfs_user='hdfs'
     )

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.hbasedecom.json

@@ -10,7 +10,7 @@
         "stack_version": "2.0",
         "stack_name": "HDP", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.json

@@ -11,7 +11,7 @@
         "stack_version": "2.0",
         "stack_name": "HDP", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari",

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json

@@ -10,7 +10,7 @@
         "stack_version": "2.0",
         "stack_name": "HDP", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/default_client.json

@@ -11,7 +11,7 @@
         "stack_version": "2.0",
         "stack_name": "HDP", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/flume_target.json

@@ -10,7 +10,7 @@
         "stack_version": "2.0",
         "stack_name": "HDP", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json

@@ -15,7 +15,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json

@@ -15,7 +15,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/secured.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/secured_client.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 1 - 1
ambari-server/src/test/python/stacks/2.0.6/configs/secured_no_jce_name.json

@@ -13,7 +13,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar"

+ 0 - 13
ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py

@@ -30,19 +30,6 @@ class TestHookAfterInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to = '/etc/hadoop/conf.empty',
-                              not_if = 'ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',

+ 119 - 0
ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py

@@ -36,4 +36,123 @@ class TestHookBeforeInstall(RMFTestCase):
         ignore_failures = True,
         path = ['/bin', '/usr/bin/'],
     )
+    self.assertResourceCalled('Group', 'hadoop',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nobody',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'users',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('Group', 'nagios',
+        ignore_failures = False,
+    )
+    self.assertResourceCalled('User', 'hive',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'oozie',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'nobody',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'nobody'],
+    )
+    self.assertResourceCalled('User', 'nagios',
+        gid = 'nagios',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'ambari-qa',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'users'],
+    )
+    self.assertResourceCalled('User', 'flume',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hdfs',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'storm',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'mapred',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hbase',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'tez',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'users'],
+    )
+    self.assertResourceCalled('User', 'zookeeper',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'falcon',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'yarn',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('User', 'hcat',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = [u'hadoop'],
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
+        not_if = 'test $(id -u ambari-qa) -gt 1000',
+    )
+    self.assertResourceCalled('File', '/tmp/changeUid.sh',
+        content = StaticFile('changeToSecureUid.sh'),
+        mode = 0555,
+    )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
+        not_if = 'test $(id -u hbase) -gt 1000',
+    )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+        owner = 'root',
+        group = 'root',
+        recursive = True,
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+        not_if = 'ls /etc/hadoop/conf',
+        to = '/etc/hadoop/conf.empty',
+    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+        content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+        owner = 'hdfs',
+    )
     self.assertNoMoreResources()

+ 2 - 108
ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py

@@ -40,119 +40,13 @@ class TestHookBeforeInstall(RMFTestCase):
     )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertResourceCalled('Package', 'curl',)
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/ ;   curl -kf -x \"\"   --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz',
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/AMBARI-artifacts/ ;   curl -kf -x \"\"   --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u67-linux-x64.tar.gz -o /tmp/AMBARI-artifacts//jdk-7u67-linux-x64.tar.gz',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
         environment = {'no_proxy': 'c6401.ambari.apache.org'},
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/AMBARI-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/AMBARI-artifacts//jdk-7u67-linux-x64.tar.gz > /dev/null 2>&1',
         not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
         path = ['/bin', '/usr/bin/'],
     )
-    self.assertResourceCalled('Group', 'hadoop',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nobody',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'users',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('Group', 'nagios',
-        ignore_failures = False,
-    )
-    self.assertResourceCalled('User', 'hive',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'oozie',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'nobody',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'nobody'],
-    )
-    self.assertResourceCalled('User', 'nagios',
-        gid = 'nagios',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'ambari-qa',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'users'],
-    )
-    self.assertResourceCalled('User', 'flume',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hdfs',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'storm',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'mapred',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hbase',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'tez',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'users'],
-    )
-    self.assertResourceCalled('User', 'zookeeper',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'falcon',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'sqoop',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'yarn',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('User', 'hcat',
-        gid = 'hadoop',
-        ignore_failures = False,
-        groups = [u'hadoop'],
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
-        not_if = 'test $(id -u ambari-qa) -gt 1000',
-    )
-    self.assertResourceCalled('File', '/tmp/changeUid.sh',
-        content = StaticFile('changeToSecureUid.sh'),
-        mode = 0555,
-    )
-    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
-        not_if = 'test $(id -u hbase) -gt 1000',
-    )
     self.assertNoMoreResources()

+ 1 - 1
ambari-server/src/test/python/stacks/2.1/configs/default.json

@@ -10,7 +10,7 @@
         "stack_version": "2.1",
         "stack_name": "HDP", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 1 - 1
ambari-server/src/test/python/stacks/2.1/configs/secured.json

@@ -14,7 +14,7 @@
         "stack_name": "HDP", 
         "db_name": "ambari", 
         "ambari_db_rca_driver": "org.postgresql.Driver", 
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz", 
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred", 
         "java_home": "/usr/jdk64/jdk1.7.0_45", 
         "mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",

+ 1 - 1
ambari-server/src/test/python/stacks/2.2/configs/default.json

@@ -10,7 +10,7 @@
         "stack_version": "2.1",
         "stack_name": "HDP",
         "ambari_db_rca_driver": "org.postgresql.Driver",
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz",
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred",
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 1 - 1
ambari-server/src/test/python/stacks/2.2/configs/secured.json

@@ -10,7 +10,7 @@
         "stack_version": "2.1",
         "stack_name": "HDP",
         "ambari_db_rca_driver": "org.postgresql.Driver",
-        "jdk_name": "jdk-7u45-linux-x64.tar.gz",
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
         "ambari_db_rca_username": "mapred",
         "java_home": "/usr/jdk64/jdk1.7.0_45",
         "db_name": "ambari"

+ 2 - 0
ambari-web/app/app.js

@@ -72,6 +72,8 @@ module.exports = Em.Application.create({
     return Em.get((this.get('currentStackVersion') || this.get('defaultStackVersion')).match(/(.+)-\d.+/), '1');
   }.property('currentStackVersion'),
 
+  allHostNames: [],
+
   currentStackVersionNumber: function () {
     var regExp = new RegExp(this.get('currentStackName') + '-');
     return (this.get('currentStackVersion') || this.get('defaultStackVersion')).replace(regExp, '');

+ 1 - 1
ambari-web/app/assets/data/requests/host_check/jdk_name.json

@@ -6,7 +6,7 @@
     "properties" : {
       "java.home" : "/usr/jdk64/jdk1.7.0_58",
       "jdk_location" : "http://c6404.ambari.apache.org:8080/resources/",
-      "jdk.name": "jdk-7u45-linux-x64.tar.gz"
+      "jdk.name": "jdk-7u67-linux-x64.tar.gz"
     }
   }
 }

+ 2 - 2
ambari-web/app/controllers/application.js

@@ -44,8 +44,8 @@ App.ApplicationController = Em.Controller.extend(App.UserPref, {
   }.property('App.router.clusterController.isLoaded','App.router.loggedIn'),
 
   isExistingClusterDataLoaded: function () {
-    return !Em.isNone(App.router.get('clusterController.clusterName')) && this.get('isClusterDataLoaded');
-  }.property('App.router.clusterController.clusterName', 'isClusterDataLoaded'),
+    return App.router.get('clusterInstallCompleted') && this.get('isClusterDataLoaded');
+  }.property('App.router.clusterInstallCompleted', 'isClusterDataLoaded'),
 
   init: function(){
     this._super();

+ 22 - 0
ambari-web/app/controllers/global/cluster_controller.js

@@ -248,6 +248,7 @@ App.ClusterController = Em.Controller.extend({
    */
   loadClusterData: function () {
     var self = this;
+    this.getAllHostNames();
     this.loadAmbariProperties();
     if (!App.get('clusterName')) {
       return;
@@ -390,5 +391,26 @@ App.ClusterController = Em.Controller.extend({
       complete: function () {
       }
     });
+  },
+
+  /**
+   * Loads all host names from the server and caches them on App.allHostNames.
+   * @returns {$.ajax} the request object from App.ajax.send
+   */
+  getAllHostNames: function () {
+    return App.ajax.send({
+      name: 'hosts.all',
+      sender: this,
+      success: 'getHostNamesSuccess',
+      error: 'getHostNamesError'
+    });
+  },
+
+  getHostNamesSuccess: function (data) {
+    App.set("allHostNames", data.items.mapProperty("Hosts.host_name"));
+  },
+
+  getHostNamesError: function () {
+    console.error('failed to load hostNames');
   }
 });
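
getAllHostNames caches every cluster host name on App.allHostNames via the 'hosts.all' request, so later consumers (config groups, the server validator) can read the list without issuing their own calls. The equivalent REST call, sketched outside the Ember app; the host, credentials and Python 2 urllib2 usage are placeholders for App.ajax:

import base64
import json
import urllib2

request = urllib2.Request('http://ambari-host:8080/api/v1/hosts')
request.add_header('Authorization',
                   'Basic ' + base64.b64encode('admin:admin'))
data = json.load(urllib2.urlopen(request))

# Mirrors data.items.mapProperty("Hosts.host_name") in the success callback.
all_host_names = [item['Hosts']['host_name'] for item in data['items']]
print(all_host_names)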

+ 2 - 2
ambari-web/app/controllers/main.js

@@ -63,11 +63,11 @@ App.MainController = Em.Controller.extend({
   },
 
   startPolling: function () {
-    if (App.router.get('clusterController.isLoaded')) {
+    if (App.router.get('applicationController.isExistingClusterDataLoaded')) {
       App.router.get('updateController').set('isWorking', true);
       App.router.get('backgroundOperationsController').set('isWorking', true);
     }
-  }.observes('App.router.clusterController.isLoaded'),
+  }.observes('App.router.applicationController.isExistingClusterDataLoaded'),
   stopPolling: function(){
     App.router.get('updateController').set('isWorking', false);
     App.router.get('backgroundOperationsController').set('isWorking', false);

+ 0 - 24
ambari-web/app/controllers/main/host.js

@@ -47,30 +47,6 @@ App.MainHostController = Em.ArrayController.extend({
     return installedComponents;
   }.property('App.router.clusterController.isLoaded'),
 
-  /**
-   * Master components
-   * @returns {Array}
-   */
-  masterComponents: function () {
-    return this.get('componentsForFilter').filterProperty('isMaster', true);
-  }.property('componentsForFilter'),
-
-  /**
-   * Slave components
-   * @returns {Array}
-   */
-  slaveComponents: function () {
-    return this.get('componentsForFilter').filterProperty('isSlave', true);
-  }.property('componentsForFilter'),
-
-  /**
-   * Client components
-   * @returns {Array}
-   */
-  clientComponents: function () {
-    return this.get('componentsForFilter').filterProperty('isClient', true);
-  }.property('componentsForFilter'),
-
   content: function () {
     return this.get('dataSource').filterProperty('isRequested');
   }.property('dataSource.@each.isRequested'),

+ 1 - 0
ambari-web/app/controllers/main/host/add_controller.js

@@ -371,6 +371,7 @@ App.AddHostController = App.WizardController.extend({
     this.clearStorageData();
     App.router.get('updateController').updateAll();
     App.updater.immediateRun('updateHost');
+    App.router.get('clusterController').getAllHostNames();
   },
 
   /**

+ 5 - 1
ambari-web/app/controllers/main/host/details.js

@@ -393,7 +393,10 @@ App.MainHostDetailsController = Em.Controller.extend({
     var self = this;
     var component = event.context;
     var componentName = component.get('componentName');
-    var missedComponents = componentsUtils.checkComponentDependencies(componentName, this.get('content.hostComponents').mapProperty('componentName'))
+    var missedComponents = componentsUtils.checkComponentDependencies(componentName, {
+      scope: 'host',
+      installedComponents: this.get('content.hostComponents').mapProperty('componentName')
+    });
     if (!!missedComponents.length) {
       var popupMessage = Em.I18n.t('host.host.addComponent.popup.dependedComponents.body').format(component.get('displayName'),
         stringUtils.getFormattedStringFromArray(missedComponents.map(function(cName) {
@@ -1531,6 +1534,7 @@ App.MainHostDetailsController = Em.Controller.extend({
           dialogSelf.hide();
           App.router.transitionTo('hosts.index');
         });
+        App.router.get('clusterController').getAllHostNames();
       },
       deleteHostErrorCallback: function (xhr, textStatus, errorThrown, opt) {
         console.log('Error deleting host.');
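
checkComponentDependencies now takes an options object, so callers can restrict the check to host-scoped dependencies, matching the scope field the stack service mapper starts persisting below. A sketch of a scope-aware check under assumed data shapes; the component and dependency names are illustrative:

def check_component_dependencies(component_name, dependencies, opts):
    missed = []
    for dep in dependencies.get(component_name, []):
        if dep['scope'] != 'host':
            continue  # cluster-scoped deps may live on any host
        if dep['componentName'] not in opts['installedComponents']:
            missed.append(dep['componentName'])
    return missed

deps = {'HBASE_MASTER': [
    {'componentName': 'ZOOKEEPER_SERVER', 'scope': 'cluster'},
    {'componentName': 'HDFS_CLIENT', 'scope': 'host'},
]}
print(check_component_dependencies(
    'HBASE_MASTER', deps, {'scope': 'host', 'installedComponents': []}))
# -> ['HDFS_CLIENT']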

+ 5 - 5
ambari-web/app/controllers/main/service/info/configs.js

@@ -450,7 +450,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
     var serviceName = this.get('content.serviceName');
     var displayName = this.get('content.displayName');
     var selectedConfigGroup;
-    var hostsLength = App.router.get('mainHostController.hostsCountMap.TOTAL');
+    var defaultHosts = App.get('allHostNames');
 
     //parse loaded config groups
     if (App.supports.hostOverrides) {
@@ -470,7 +470,9 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
               hosts: groupHosts,
               configSiteTags: []
             });
-            hostsLength -= groupHosts.length;
+            for (var i = 0; i < groupHosts.length; i++) {
+              defaultHosts = defaultHosts.without(groupHosts[i]);
+            }
             item.desired_configs.forEach(function (config) {
               newConfigGroup.configSiteTags.push(App.ConfigSiteTag.create({
                 site: config.type,
@@ -491,9 +493,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorM
       name: displayName + " Default",
       description: "Default cluster level " + serviceName + " configuration",
       isDefault: true,
-      hosts: {
-        length: hostsLength
-      },
+      hosts: defaultHosts,
       parentConfigGroup: null,
       service: this.get('content'),
       serviceName: serviceName,
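
The default config group previously tracked only a host count; it now carries the concrete host list, computed by removing every overridden group's hosts from App.allHostNames. Ember's without() drops one element per pass, so the same computation reads naturally as a set difference; a sketch with placeholder host names:

all_host_names = ['h1', 'h2', 'h3', 'h4']
override_group_hosts = [['h2'], ['h4']]

default_hosts = set(all_host_names)
for group_hosts in override_group_hosts:
    default_hosts -= set(group_hosts)     # what the without() loop does

assert sorted(default_hosts) == ['h1', 'h3']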

+ 0 - 4
ambari-web/app/controllers/main/views_controller.js

@@ -25,10 +25,6 @@ App.MainViewsController = Em.Controller.extend({
 
   ambariViews: [],
 
-  init: function () {
-    this.loadAmbariViews();
-  },
-
   dataLoading: function () {
     var viewsController = this;
     var dfd = $.Deferred();

+ 4 - 4
ambari-web/app/controllers/wizard/step3_controller.js

@@ -1112,10 +1112,10 @@ App.WizardStep3Controller = Em.Controller.extend({
       var selectedOS = [];
       var self = this;
       var isValid = false;
-      if (selectedStack && selectedStack.operatingSystems) {
-        selectedStack.get('operatingSystems').filterProperty('selected', true).forEach(function (os) {
-          selectedOS.pushObject(os.osType);
-          if (self.repoToAgentOsType(os.osType).indexOf(osType) >= 0) {
+      if (selectedStack && selectedStack.get('operatingSystems')) {
+        selectedStack.get('operatingSystems').filterProperty('isSelected', true).forEach(function (os) {
+          selectedOS.pushObject(os.get('osType'));
+          if (self.repoToAgentOsType(os.get('osType')).indexOf(osType) >= 0) {
             isValid = true;
           }
         });

+ 2 - 2
ambari-web/app/controllers/wizard/step7_controller.js

@@ -150,7 +150,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
    */
   selectedServiceNames: function () {
     return this.get('content.services').filterProperty('isSelected', true).filterProperty('isInstalled', false).mapProperty('serviceName');
-  }.property('content.services', 'content.stacks.@each.isSelected').cacheable(),
+  }.property('content.services', 'content.services.@each.isSelected', 'content.services.@each.isInstalled', 'content.stacks.@each.isSelected').cacheable(),
 
   /**
    * List of installed and selected to install service names
@@ -160,7 +160,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
     return this.get('content.services').filter(function (service) {
       return service.get('isInstalled') || service.get('isSelected');
     }).mapProperty('serviceName');
-  }.property('content.services', 'content.stacks.@each.isSelected').cacheable(),
+  }.property('content.services', 'content.services.@each.isSelected', 'content.services.@each.isInstalled', 'content.stacks.@each.isSelected').cacheable(),
 
   /**
    * List of installed service names

+ 1 - 1
ambari-web/app/controllers/wizard/step8_controller.js

@@ -1154,7 +1154,7 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, {
     clientNames.forEach(function (clientName) {
       clientsMap[clientName] = Em.A([]);
       dependedComponents.forEach(function (component) {
-        if (component.get('dependencies').contains(clientName)) clientsMap[clientName].push(component.get('componentName'));
+        if (component.get('dependencies').mapProperty('componentName').contains(clientName)) clientsMap[clientName].push(component.get('componentName'));
       });
       if (!clientsMap[clientName].length) delete clientsMap[clientName];
     });

+ 13 - 1
ambari-web/app/data/HDP2/site_properties.js

@@ -1818,7 +1818,19 @@ module.exports =
       "serviceName": "MISC",
       "filename": "cluster-env.xml"
     },
-
+    {
+      "id": "puppet var",
+      "name": "rpm_version",
+      "displayName": "Hadoop RPM version",
+      "description": "Hadoop RPM version",
+      "defaultValue": '',
+      "isRequired": true,
+      "isOverridable": false,
+      "isVisible": false,
+      "isEditable": false,
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml"
+    },
 
   /**********************************************MAPREDUCE2***************************************/
     {

+ 1 - 0
ambari-web/app/mappers/service_config_version_mapper.js

@@ -27,6 +27,7 @@ App.serviceConfigVersionsMapper = App.QuickDataMapper.create({
     create_time: 'createtime',
     group_id: 'group_id',
     group_name: 'group_name',
+    hosts: 'hosts',
     author: 'user',
     notes: 'service_config_version_note',
     is_current: 'is_current',

+ 5 - 2
ambari-web/app/mappers/stack_service_mapper.js

@@ -58,7 +58,7 @@ App.stackServiceMapper = App.QuickDataMapper.create({
     dependencies_key: 'dependencies',
     dependencies_type: 'array',
     dependencies: {
-      item: 'Dependencies.component_name'
+      item: 'Dependencies'
     }
   },
 
@@ -80,8 +80,11 @@ App.stackServiceMapper = App.QuickDataMapper.create({
         var stackService = item.StackServices;
         var serviceComponents = [];
         item.serviceComponents.forEach(function (serviceComponent) {
+          var dependencies = serviceComponent.dependencies.map(function(dependency) {
+            return { Dependencies: App.keysUnderscoreToCamelCase(App.permit(dependency.Dependencies, ['component_name', 'scope'])) };
+          });
           serviceComponent.StackServiceComponents.id = serviceComponent.StackServiceComponents.component_name;
-          serviceComponent.StackServiceComponents.dependencies = serviceComponent.dependencies;
+          serviceComponent.StackServiceComponents.dependencies = dependencies;
           serviceComponents.push(serviceComponent.StackServiceComponents);
           stackServiceComponents.push(this.parseIt(serviceComponent.StackServiceComponents, this.get('component_config')));
         }, this);

+ 1 - 1
ambari-web/app/messages.js

@@ -1709,7 +1709,7 @@ Em.I18n.translations = {
   'hosts.host.addComponent.msg':'Are you sure you want to add {0}?',
   'hosts.host.addComponent.addZooKeeper':'Adding ZooKeeper Server may reconfigure such properties:<ul><li>ha.zookeeper.quorum</li><li>hbase.zookeeper.quorum</li><li>templeton.zookeeper.hosts</li><li>yarn.resourcemanager.zk-address</li><li>hive.zookeeper.quorum</li></ul>',
   'hosts.host.addComponent.deleteHostWithZooKeeper':'Deleting host with ZooKeeper Server may reconfigure such properties:<ul><li>ha.zookeeper.quorum</li><li>hbase.zookeeper.quorum</li><li>templeton.zookeeper.hosts</li><li>yarn.resourcemanager.zk-address</li><li>hive.zookeeper.quorum</li></ul>',
-  'host.host.addComponent.popup.dependedComponents.body': '{0} requires {1} to be installed along with it. Please add them first and then try adding {0}',
+  'host.host.addComponent.popup.dependedComponents.body': '{0} requires {1} to be installed along with it on the same host. Please add them first and then try adding {0}',
   'host.host.addComponent.popup.dependedComponents.header': 'Component dependencies',
   'hosts.host.zooKeeper.configs.save.note': 'This configuration is created by ambari while installing/deleting zookeeper component on a host',
   'hosts.host.addComponent.note':'<b>Important:</b> After this <i>{0}</i> is installed, go to <i>Services -> Nagios</i> to restart the Nagios service.  This is required for the alerts and notifications to work properly.',

+ 9 - 52
ambari-web/app/mixins/common/serverValidator.js

@@ -43,9 +43,6 @@ App.ServerValidatorMixin = Em.Mixin.create({
    */
   recommendationsConfigs: null,
 
-  loadAdditionalinfo: function() {
-    return !(this.get('content.hosts') && this.get('content.hosts').length);
-  }.property('content.hosts'),
   /**
    * by default loads data from model otherwise must be overridden as computed property
    * refer to \assets\data\stacks\HDP-2.1\recommendations_configs.json to learn structure
@@ -55,8 +52,8 @@ App.ServerValidatorMixin = Em.Mixin.create({
   hostNames: function() {
     return this.get('content.hosts')
         ? Object.keys(this.get('content.hosts'))
-        : this.get('allHostNames');
-  }.property('content.hosts', 'allHostNames'),
+        : App.get('allHostNames');
+  }.property('content.hosts', 'App.allHostNames'),
 
   allHostNames: [],
   /**
@@ -64,10 +61,8 @@ App.ServerValidatorMixin = Em.Mixin.create({
    * @type {Array} - of strings (serviceNames)
    */
   serviceNames: function() {
-    return this.get('content.serviceName')
-        ? [this.get('content.serviceName')]
-        : App.StackService.find().filter(function(s){return s.get('isSelected') || s.get('isInstalled')}).mapProperty('serviceName');
-  }.property('content.serviceName'),
+    return this.get('content.serviceName') ? [this.get('content.serviceName')] : this.get('allSelectedServiceNames');
+  }.property('content.serviceName', 'allSelectedServiceNames.@each'),
 
   /**
    * by default loads data from model otherwise must be overridden as computed property
@@ -77,10 +72,10 @@ App.ServerValidatorMixin = Em.Mixin.create({
   services: function() {
     return this.get('content.serviceName')
         ? [App.StackService.find(this.get('content.serviceName'))]
-        : App.StackService.find().filter(function(s){
+        : this.get('content.services').filter(function(s){
           return (s.get('isSelected') || s.get('isInstalled'))
         }).concat(require("data/service_configs"));
-  }.property('content.serviceName'),
+  }.property('content.serviceName', 'content.services', 'content.services.@each.isSelected', 'content.services.@each.isInstalled', 'content.stacks.@each.isSelected'),
 
   /**
    * by default loads data from model otherwise must be overridden as computed property
@@ -139,21 +134,14 @@ App.ServerValidatorMixin = Em.Mixin.create({
     console.error('Load recommendations failed');
   },
 
-  serverSideValidation: function() {
+  serverSideValidation: function () {
     var deferred = $.Deferred();
     if (!App.get('supports.serverRecommendValidate')) {
       deferred.resolve();
     } else {
       this.set('configValidationFailed', false);
-      if (this.get('loadAdditionalinfo')) {
-        var self = this;
-        this.getHostNames().always(function() {
-          if (self.get('configValidationFailed')) {
-            self.warnUser(deferred);
-          } else {
-            self.runServerSideValidation(deferred);
-          }
-        });
+      if (this.get('configValidationFailed')) {
+        this.warnUser(deferred);
       } else {
         this.runServerSideValidation(deferred);
       }
@@ -161,37 +149,6 @@ App.ServerValidatorMixin = Em.Mixin.create({
     return deferred;
   },
 
-  getHostNames: function() {
-    var self = this;
-
-    if (self.get('isInstaller')) {
-      // In installer wizard 'hosts.all' AJAX will not work cause cluster haven't been created yet
-      var hosts = [];
-      for (var host in self.get('content.hosts')) {
-        hosts.push(host);
-      }
-      self.set("allHostNames", hosts);
-      var deferred = $.Deferred();
-      deferred.resolve();
-      return deferred;
-    } else {
-      return App.ajax.send({
-        name: 'hosts.all',
-        sender: self,
-        success: 'getHostNamesSuccess',
-        error: 'getHostNamesError'
-      });
-    }
-  },
-
-  getHostNamesSuccess: function(data) {
-    this.set("allHostNames", data.items.mapProperty("Hosts.host_name"));
-  },
-
-  getHostNamesError: function() {
-    this.set('configValidationFailed', true);
-    console.error('failed to load hostNames');
-  },
   /**
    * @method serverSideValidation
    * send request to validate configs

+ 1 - 1
ambari-web/app/models/repository.js

@@ -37,7 +37,7 @@ App.Repository = DS.Model.extend({
 
   isSelected: function() {
     return this.get('operatingSystem.isSelected');
-  }.property('id'),
+  }.property('id','operatingSystem.isSelected'),
 
   emptyError: function() {
     return !this.get('baseUrl');

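The extra dependent key matters because Ember caches computed properties: with only `id` declared, toggling the parent operating system never invalidated the cached value. A small illustration (record lookup is hypothetical):

  var repo = App.Repository.find().get('firstObject');
  repo.get('isSelected');                         // computed once, then cached
  repo.set('operatingSystem.isSelected', false);  // with the new key this invalidates the cache
  repo.get('isSelected');                         // recomputed -> false
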
+ 19 - 4
ambari-web/app/models/service_config_version.js

@@ -33,15 +33,19 @@ App.ServiceConfigVersion = DS.Model.extend({
   author: DS.attr('string'),
   notes: DS.attr('string'),
   service: DS.belongsTo('App.Service'),
+  hosts: DS.attr('array'),
   index: DS.attr('number'),
   isCurrent: DS.attr('boolean'),
   isDisplayed: DS.attr('boolean'),
+  isDefault: function() {
+    return this.get('groupName') === 'default';
+  }.property('groupName'),
   currentTooltip: function () {
     return Em.I18n.t('dashboard.configHistory.table.current.tooltip').format(this.get('displayName'), this.get('configGroupName'));
   }.property('displayName', 'configGroupName'),
   configGroupName: function () {
-    return (this.get('groupName') === 'default') ? (this.get('displayName') + ' ' + Em.I18n.t('common.default')) : this.get('groupName');
-  }.property('groupName'),
+    return this.get('isDefault') ? (this.get('displayName') + ' ' + Em.I18n.t('common.default')) : this.get('groupName');
+  }.property('groupName', 'isDefault'),
   fullNotes: function () {
     return (typeof this.get('notes') === 'string') ? this.get('notes') || Em.I18n.t('dashboard.configHistory.table.notes.no') : Em.I18n.t('dashboard.configHistory.table.notes.no');
   }.property('notes'),
@@ -68,8 +72,19 @@ App.ServiceConfigVersion = DS.Model.extend({
    */
   isRequested: DS.attr('boolean'),
   isRestartRequired: function () {
-    return this.get('service.isRestartRequired') && this.get('isCurrent');
-  }.property('service.isRestartRequired', 'isCurrent'),
+    if (this.get('service.isRestartRequired') && this.get('isCurrent')) {
+      var hostNames = this.get('isDefault')
+        ? App.router.get('mainServiceInfoConfigsController.configGroups').findProperty('isDefault').get('hosts')
+        : this.get('hosts');
+      if (!hostNames.length) return false;
+      for (var i = 0; i < hostNames.length; i++) {
+        if (Object.keys(this.get('service.restartRequiredHostsAndComponents')).contains(hostNames[i])) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }.property('service.isRestartRequired', 'isDefault', 'isCurrent', 'hosts', 'service.restartRequiredHostsAndComponents', 'router.mainServiceInfoConfigsController.configGroups'),
   disabledActionMessages: function () {
     return {
       view: (this.get('isDisplayed')) ? Em.I18n.t('dashboard.configHistory.info-bar.view.button.disabled') : '',

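`isRestartRequired` now reports true only when one of the version's own hosts actually has components awaiting restart. `service.restartRequiredHostsAndComponents` is assumed to be an object keyed by host name, roughly:

  // Assumed shape (host name -> components awaiting restart); values are examples:
  {
    "c6401.ambari.apache.org": ["NAMENODE", "DATANODE"],
    "c6402.ambari.apache.org": ["DATANODE"]
  }
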
+ 2 - 1
ambari-web/app/routes/installer.js

@@ -35,6 +35,7 @@ module.exports = Em.Route.extend({
           var name = 'Cluster Install Wizard';
           $('title').text('Ambari - ' + name);
 
+          App.router.get('mainViewsController').loadAmbariViews();
           if (App.get('isAdmin')) {
             router.get('mainController').stopPolling();
             console.log('In installer, successfully authenticated');
@@ -82,7 +83,6 @@ module.exports = Em.Route.extend({
               App.router.transitionTo('main.views.index');
             });
           }
-
         });
       } else {
        console.log('In installer but not authenticated');
@@ -426,6 +426,7 @@ module.exports = Em.Route.extend({
        // We need to do recovery based on whether we are in the Add Host or Installer wizard
         controller.saveClusterState('DEFAULT');
         App.router.set('clusterController.isLoaded', false);
+        router.set('clusterInstallCompleted', true);
         router.transitionTo('main.dashboard.index');
       });
     }

+ 2 - 1
ambari-web/app/routes/main.js

@@ -26,11 +26,12 @@ module.exports = Em.Route.extend({
     console.log('in /main:enter');
     router.getAuthenticated().done(function (loggedIn) {
       if (loggedIn) {
+        App.router.get('mainViewsController').loadAmbariViews();
         App.router.get('clusterController').loadClusterName(false).done(function () {
           if (App.get('testMode')) {
             router.get('mainController').initialize();
           } else {
-            if (App.get('clusterName')) {
+            if (router.get('clusterInstallCompleted')) {
               App.router.get('mainController').checkServerClientVersion().done(function () {
                 App.router.get('clusterController').loadClientServerClockDistance().done(function () {
                   router.get('mainController').initialize();

+ 3 - 3
ambari-web/app/templates/main/host/component_filter.hbs

@@ -28,7 +28,7 @@
             {{view Ember.Checkbox checkedBinding="view.masterComponentsChecked"}} {{t host.host.componentFilter.master}}:
             </label>
             <ul>
-            {{#each component in masterComponents}}
+            {{#each component in view.masterComponents}}
               <li>
                 <label class="checkbox">
                 {{view Ember.Checkbox checkedBinding="component.checkedForHostFilter" }} {{unbound component.displayName}}
@@ -42,7 +42,7 @@
             {{view Ember.Checkbox checkedBinding="view.slaveComponentsChecked"}} {{t host.host.componentFilter.slave}}:
             </label>
             <ul>
-            {{#each component in slaveComponents}}
+            {{#each component in view.slaveComponents}}
               <li>
                 <label class="checkbox">
                 {{view Ember.Checkbox checkedBinding="component.checkedForHostFilter" }} {{unbound component.displayName}}
@@ -56,7 +56,7 @@
             {{view Ember.Checkbox checkedBinding="view.clientComponentsChecked"}} {{t host.host.componentFilter.client}}:
             </label>
             <ul>
-            {{#each component in clientComponents}}
+            {{#each component in view.clientComponents}}
               <li>
               <label class="checkbox">
               {{view Ember.Checkbox checkedBinding="component.checkedForHostFilter" }} {{unbound component.displayName}}

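The `view.` prefix is required because a bare `{{#each component in masterComponents}}` resolves against the template's controller, while the component lists are defined on the backing view, along the lines of (view name and population logic hypothetical):

  App.MainHostComponentFilterView = Em.View.extend({
    masterComponents: [],  // filled in by the hosts filter logic
    slaveComponents: [],
    clientComponents: []
  });
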
+ 2 - 2
ambari-web/app/utils/ajax/ajax.js

@@ -1094,7 +1094,7 @@ var urls = {
     }
   },
   'wizard.service_components': {
-    'real': '{stackUrl}/services?fields=StackServices/*,serviceComponents/*',
+    'real': '{stackUrl}/services?fields=StackServices/*,serviceComponents/*,serviceComponents/dependencies/Dependencies/scope',
     'mock': '/data/stacks/HDP-2.1/service_components.json',
     'format': function(data) {
       return {
@@ -1882,7 +1882,7 @@ var urls = {
     }
   },
   'service.serviceConfigVersions.get': {
-    real: '/clusters/{clusterName}/configurations/service_config_versions?service_name={serviceName}&fields=service_config_version,user,group_id,group_name,is_current,createtime,service_name,service_config_version_note&minimal_response=true',
+    real: '/clusters/{clusterName}/configurations/service_config_versions?service_name={serviceName}&fields=service_config_version,user,hosts,group_id,group_name,is_current,createtime,service_name,service_config_version_note&minimal_response=true',
     mock: '/data/configurations/service_versions.json'
   },
   'service.serviceConfigVersions.get.current': {

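A hypothetical call site for the extended `wizard.service_components` request, following the `App.ajax.send` pattern used elsewhere in this diff; the added field exposes each dependency's scope (`host`, `cluster` or `*`), while the `hosts` field added to the second URL backs the new `hosts` attribute on App.ServiceConfigVersion above:

  App.ajax.send({
    name: 'wizard.service_components',
    sender: this,
    data: { stackUrl: '/api/v1/stacks/HDP/versions/2.1' },  // example stack URL
    success: 'loadServiceComponentsSuccess',                // hypothetical handlers
    error: 'loadServiceComponentsError'
  });
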
+ 28 - 5
ambari-web/app/utils/components.js

@@ -137,15 +137,38 @@ module.exports = {
   },
   /**
   * Check if all required components are installed on the host.
+   * Available options:
+   *  scope: 'host' - dependency scope: `host`, `cluster` or `*`.
+   *  hostName: 'example.com' - host whose installed components should be checked
+   *  installedComponents: ['A', 'B'] - names of components already installed
+   *
+   * By default the scope is `*`.
+   * For a host-level dependency you should specify at least the `hostName` or `installedComponents` attribute.
    *
    * @param {String} componentName
-   * @param {Array} installedComponentNames
+   * @param {Object} opt - options. Allowed options are `hostName`, `installedComponents`, `scope`.
   * @return {Array} - names of missing components
    */
-  checkComponentDependencies: function(componentName, installedComponentNames) {
-    return App.StackServiceComponent.find(componentName).get('dependencies').filter(function(dependency) {
-      return !installedComponentNames.contains(dependency)
-    });
+  checkComponentDependencies: function(componentName, opt) {
+    opt = opt || {};
+    opt.scope = opt.scope || '*';
+    var installedComponents;
+    var dependencies = App.StackServiceComponent.find(componentName).get('dependencies');
+    dependencies = opt.scope === '*' ? dependencies : dependencies.filterProperty('scope', opt.scope);
+    if (dependencies.length === 0) return [];
+    switch (opt.scope) {
+      case 'host':
+        Em.assert("You should pass at least `hostName` or `installedComponents` to options.", opt.hostName || opt.installedComponents);
+        installedComponents = opt.installedComponents || App.HostComponent.find().filterProperty('hostName', opt.hostName).mapProperty('componentName').uniq();
+        break;
+      default:
+        // @todo: use a source of installed components appropriate to the requested scope
+        installedComponents = opt.installedComponents || App.HostComponent.find().mapProperty('componentName').uniq();
+        break;
+    }
+    return dependencies.filter(function(dependency) {
+      return !installedComponents.contains(dependency.componentName);
+    }).mapProperty('componentName');
   }
 
 };
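
A usage sketch of the extended dependency check (component and host names are examples only):

  var components = require('utils/components');

  // Cluster-wide check; scope defaults to '*', matching the old behaviour:
  components.checkComponentDependencies('HBASE_MASTER');

  // Host-level check against an explicit list of installed components:
  components.checkComponentDependencies('HBASE_MASTER', {
    scope: 'host',
    installedComponents: ['ZOOKEEPER_SERVER', 'HDFS_CLIENT']
  });
  // -> names of host-scoped dependencies still missing from the host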

Some files were not shown because of the large number of changes