
AMBARI-10576. Add the ability to obtain details about required Kerberos identities (rlevas)

Robert Levas 10 years ago
parent
commit
66e42cbab8
35 changed files with 2482 additions and 36 deletions
  1. +1 -1     ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
  2. +83 -0    ambari-server/src/main/java/org/apache/ambari/server/api/query/render/HostKerberosIdentityCsvRenderer.java
  3. +25 -3    ambari-server/src/main/java/org/apache/ambari/server/api/resources/BaseResourceDefinition.java
  4. +1 -1     ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostComponentResourceDefinition.java
  5. +56 -0    ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostKerberosIdentityResourceDefinition.java
  6. +1 -0     ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostResourceDefinition.java
  7. +4 -0     ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
  8. +6 -0     ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseService.java
  9. +16 -0    ambari-server/src/main/java/org/apache/ambari/server/api/services/ClusterService.java
  10. +121 -0  ambari-server/src/main/java/org/apache/ambari/server/api/services/HostKerberosIdentityService.java
  11. +11 -0   ambari-server/src/main/java/org/apache/ambari/server/api/services/HostService.java
  12. +2 -2    ambari-server/src/main/java/org/apache/ambari/server/api/services/ResultPostProcessorImpl.java
  13. +224 -0  ambari-server/src/main/java/org/apache/ambari/server/api/services/serializers/CsvSerializer.java
  14. +2 -2    ambari-server/src/main/java/org/apache/ambari/server/api/services/serializers/JsonSerializer.java
  15. +10 -2   ambari-server/src/main/java/org/apache/ambari/server/api/util/TreeNode.java
  16. +10 -4   ambari-server/src/main/java/org/apache/ambari/server/api/util/TreeNodeImpl.java
  17. +2 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
  18. +167 -0  ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
  19. +3 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
  20. +2 -0    ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
  21. +243 -0  ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostKerberosIdentityResourceProvider.java
  22. +3 -1    ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
  23. +13 -0   ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java
  24. +21 -0   ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
  25. +45 -13  ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
  26. +2 -2    ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
  27. +2 -2    ambari-server/src/test/java/org/apache/ambari/server/api/query/render/MinimalRendererTest.java
  28. +2 -2    ambari-server/src/test/java/org/apache/ambari/server/api/resources/BaseResourceDefinitionTest.java
  29. +48 -0   ambari-server/src/test/java/org/apache/ambari/server/api/resources/HostKerberosIdentityResourceDefinitionTest.java
  30. +2 -1    ambari-server/src/test/java/org/apache/ambari/server/api/resources/HostResourceDefinitionTest.java
  31. +10 -0   ambari-server/src/test/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImplTest.java
  32. +93 -0   ambari-server/src/test/java/org/apache/ambari/server/api/services/HostKerberosIdentityServiceTest.java
  33. +258 -0  ambari-server/src/test/java/org/apache/ambari/server/api/services/serializers/CsvSerializerTest.java
  34. +631 -0  ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
  35. +362 -0  ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostKerberosIdentityResourceProviderTest.java

+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java

@@ -264,7 +264,7 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
    * @return true if the node represents a collection; false otherwise
    */
   private boolean isCollection(TreeNode<Resource> node) {
-    String isCollection = node.getProperty("isCollection");
+    String isCollection = node.getStringProperty("isCollection");
     return isCollection != null && isCollection.equals("true");
   }
 

+ 83 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/query/render/HostKerberosIdentityCsvRenderer.java

@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.query.render;
+
+import org.apache.ambari.server.api.services.Result;
+import org.apache.ambari.server.api.services.serializers.CsvSerializer;
+import org.apache.ambari.server.api.util.TreeNode;
+import org.apache.ambari.server.controller.spi.Resource;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Renderer which updates a HostKerberosIdentity resource so it may be serialized using a CSV serializer.
+ * <p/>
+ * This implementation extends the DefaultRenderer to add the header mapping and column order information
+ * to the root of TreeNode structure.
+ *
+ * @see CsvSerializer
+ */
+public class HostKerberosIdentityCsvRenderer extends DefaultRenderer {
+
+  @Override
+  public Result finalizeResult(Result queryResult) {
+    TreeNode<Resource> resultTree = queryResult.getResultTree();
+
+    if(resultTree != null) {
+      // TODO: Determine which columns/fields are relevant for the query and prune as needed.
+      Map<String, String> columnMap = new HashMap<String, String>() {{
+        put("KerberosIdentity/host_name", "host");
+        put("KerberosIdentity/description", "description");
+        put("KerberosIdentity/principal_name", "principal name");
+        put("KerberosIdentity/principal_type", "principal type");
+        put("KerberosIdentity/principal_local_username", "local username");
+        put("KerberosIdentity/keytab_file_path", "keytab file path");
+        put("KerberosIdentity/keytab_file_owner", "keytab file owner");
+        put("KerberosIdentity/keytab_file_owner_access", "keytab file owner access");
+        put("KerberosIdentity/keytab_file_group", "keytab file group");
+        put("KerberosIdentity/keytab_file_group_access", "keytab file group access");
+        put("KerberosIdentity/keytab_file_mode", "keytab file mode");
+        put("KerberosIdentity/keytab_file_installed", "keytab file installed");
+      }};
+
+      List<String> columnOrder = new ArrayList<String>() {{
+        add("KerberosIdentity/host_name");
+        add("KerberosIdentity/description");
+        add("KerberosIdentity/principal_name");
+        add("KerberosIdentity/principal_type");
+        add("KerberosIdentity/principal_local_username");
+        add("KerberosIdentity/keytab_file_path");
+        add("KerberosIdentity/keytab_file_owner");
+        add("KerberosIdentity/keytab_file_owner_access");
+        add("KerberosIdentity/keytab_file_group");
+        add("KerberosIdentity/keytab_file_group_access");
+        add("KerberosIdentity/keytab_file_mode");
+        add("KerberosIdentity/keytab_file_installed");
+      }};
+
+      resultTree.setProperty(CsvSerializer.PROPERTY_COLUMN_MAP, columnMap);
+      resultTree.setProperty(CsvSerializer.PROPERTY_COLUMN_ORDER, columnOrder);
+    }
+
+    return queryResult;
+  }
+}

+ 25 - 3
ambari-server/src/main/java/org/apache/ambari/server/api/resources/BaseResourceDefinition.java

@@ -28,6 +28,8 @@ import org.apache.ambari.server.controller.spi.ClusterController;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.Schema;
 import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
+import org.apache.commons.codec.EncoderException;
+import org.apache.commons.codec.net.URLCodec;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -180,13 +182,33 @@ public abstract class BaseResourceDefinition implements ResourceDefinition {
         }
 
         Schema schema = getClusterController().getSchema(r.getType());
-        Object id     = r.getPropertyValue(schema.getKeyPropertyId(r.getType()));
+        Object id = r.getPropertyValue(schema.getKeyPropertyId(r.getType()));
 
-        href = parent.getProperty("isCollection").equals("true") ?
-            href + id : href + parent.getName() + '/' + id;
+        String hrefIdPart = urlencode(id);
+
+        href = parent.getStringProperty("isCollection").equals("true") ?
+            href + hrefIdPart : href + parent.getName() + '/' + hrefIdPart;
       }
       resultNode.setProperty("href", href);
     }
+
+    /**
+     * URL encodes the id (string) value
+     *
+     * @param id the id to URL encode
+     * @return an empty string if id is null, else the URL-encoded value of the id
+     */
+    protected String urlencode(Object id) {
+      if (id == null)
+        return "";
+      else {
+        try {
+          return new URLCodec().encode(id.toString());
+        } catch (EncoderException e) {
+          return id.toString();
+        }
+      }
+    }
   }
 
   /**

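For context on the new urlencode() helper above: Kerberos principal names, which become the ids of the new kerberos_identities resources, typically contain '/' and '@', so they must be escaped before being appended to an href. A minimal sketch using the commons-codec URLCodec imported above, with a hypothetical principal name:

import org.apache.commons.codec.net.URLCodec;

public class UrlEncodeSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical principal name used as a resource id.
    String id = "HTTP/host.example.com@EXAMPLE.COM";
    // '/' becomes %2F and '@' becomes %40, so the id stays a single path segment in the href.
    System.out.println(new URLCodec().encode(id));  // HTTP%2Fhost.example.com%40EXAMPLE.COM
  }
}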
+ 1 - 1
ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostComponentResourceDefinition.java

@@ -106,7 +106,7 @@ public class HostComponentResourceDefinition extends BaseResourceDefinition {
       //todo: look at partial request fields to ensure that hosts should be returned
       if (request.getResource().getResourceDefinition().getType() == getType()) {
         // only add host if query host_resource was directly queried
-        String nodeHref = resultNode.getProperty("href");
+        String nodeHref = resultNode.getStringProperty("href");
         resultNode.getObject().setProperty(PropertyHelper.getPropertyId("host", "href"),
             nodeHref.substring(0, nodeHref.indexOf("/host_components/")));
       }

+ 56 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostKerberosIdentityResourceDefinition.java

@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.server.api.resources;
+
+import org.apache.ambari.server.api.query.render.HostKerberosIdentityCsvRenderer;
+import org.apache.ambari.server.api.query.render.Renderer;
+import org.apache.ambari.server.controller.spi.Resource;
+
+/**
+ * HostKerberosIdentity resource definition.
+ */
+public class HostKerberosIdentityResourceDefinition extends BaseResourceDefinition {
+
+  /**
+   * Constructor.
+   */
+  public HostKerberosIdentityResourceDefinition() {
+    super(Resource.Type.HostKerberosIdentity);
+  }
+
+  @Override
+  public String getPluralName() {
+    return "kerberos_identities";
+  }
+
+  @Override
+  public String getSingularName() {
+    return "kerberos_identity";
+  }
+
+  @Override
+  public Renderer getRenderer(String name) {
+    if ("csv".equalsIgnoreCase(name)) {
+      return new HostKerberosIdentityCsvRenderer();
+    } else {
+      return super.getRenderer(name);
+    }
+  }
+}

+ 1 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/resources/HostResourceDefinition.java

@@ -52,6 +52,7 @@ public class HostResourceDefinition extends BaseResourceDefinition {
     subs.add(new SubResourceDefinition(Resource.Type.HostComponent));
     subs.add(new SubResourceDefinition(Resource.Type.Alert));
     subs.add(new SubResourceDefinition(Resource.Type.HostStackVersion));
+    subs.add(new SubResourceDefinition(Resource.Type.HostKerberosIdentity));
     return subs;
   }
 }

+ 4 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java

@@ -384,6 +384,10 @@ public class ResourceInstanceFactoryImpl implements ResourceInstanceFactory {
         resourceDefinition = new ActiveWidgetLayoutResourceDefinition();
         break;
 
+      case HostKerberosIdentity:
+        resourceDefinition = new HostKerberosIdentityResourceDefinition();
+        break;
+
       default:
         throw new IllegalArgumentException("Unsupported resource type: " + type);
     }

+ 6 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/BaseService.java

@@ -24,6 +24,7 @@ import org.apache.ambari.server.api.resources.ResourceInstanceFactoryImpl;
 import org.apache.ambari.server.api.services.parsers.BodyParseException;
 import org.apache.ambari.server.api.services.parsers.JsonRequestBodyParser;
 import org.apache.ambari.server.api.services.parsers.RequestBodyParser;
+import org.apache.ambari.server.api.services.serializers.CsvSerializer;
 import org.apache.ambari.server.api.services.serializers.JsonSerializer;
 import org.apache.ambari.server.api.services.serializers.ResultSerializer;
 import org.apache.ambari.server.controller.spi.Resource;
@@ -41,6 +42,7 @@ import java.util.Set;
  * Provides common functionality to all services.
  */
 public abstract class BaseService {
+  public final static MediaType MEDIA_TYPE_TEXT_CSV_TYPE = new MediaType("text", "csv");
 
   /**
    * Factory for creating resource instances.
@@ -175,6 +177,10 @@ public abstract class BaseService {
         }
       };
     }
+    else if (mediaType.equals(MEDIA_TYPE_TEXT_CSV_TYPE)) {
+      return new CsvSerializer();
+    }
+
     throw new IllegalArgumentException("The media type " + mediaType + " is not supported.");
   }
 

+ 16 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/ClusterService.java

@@ -418,6 +418,22 @@ public class ClusterService extends BaseService {
     return new HostComponentService(clusterName, null);
   }
 
+  /**
+   * Get the host Kerberos identity resource without specifying the parent host.
+   * Allows accessing host Kerberos identity resources across hosts.
+   *
+   * @param request      the request
+   * @param clusterName  the cluster name
+   *
+   * @return  the host Kerberos identity service with no parent host set
+   */
+  @Path("{clusterName}/kerberos_identities")
+  public HostKerberosIdentityService getHostKerberosIdentityHandler(@Context javax.ws.rs.core.Request request, @PathParam("clusterName") String clusterName) {
+
+    hasPermission(Request.Type.valueOf(request.getMethod()), clusterName);
+    return new HostKerberosIdentityService(clusterName, null);
+  }
+
   /**
    * Get the component resource without specifying the parent service.
    * Allows accessing component resources across services.

+ 121 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/HostKerberosIdentityService.java

@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.controller.spi.Resource;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Service responsible for kerberos identity resource requests.
+ */
+public class HostKerberosIdentityService extends BaseService {
+
+  /**
+   * Parent cluster name.
+   */
+  private String clusterName;
+
+  /**
+   * Relevant hostname
+   */
+  private String hostName;
+
+  /**
+   * Constructor.
+   *
+   * @param clusterName cluster name
+   * @param hostName    host name
+   */
+  public HostKerberosIdentityService(String clusterName, String hostName) {
+    this.clusterName = clusterName;
+    this.hostName = hostName;
+  }
+
+  /**
+   * Handles GET: /clusters/{clusterID}/hosts/{hostID}/kerberos_identities/{identityID}
+   * Get a specific Kerberos identity.
+   *
+   * @param headers    http headers
+   * @param ui         uri info
+   * @param identityID Kerberos identity id
+   * @param format     output format
+   * @return a Kerberos identity resource representation
+   */
+  @GET
+  @Path("{kerberosIdentityID}")
+  @Produces("text/plain")
+  public Response getKerberosIdentity(String body, @Context HttpHeaders headers, @Context UriInfo ui,
+                                      @PathParam("kerberosIdentityID") String identityID,
+                                      @QueryParam("format") String format) {
+
+    MediaType mediaType;
+    if ("csv".equalsIgnoreCase(format)) {
+      mediaType = MEDIA_TYPE_TEXT_CSV_TYPE;
+    } else {
+      mediaType = null;
+    }
+
+    return handleRequest(headers, body, ui, Request.Type.GET, mediaType, createResource(clusterName, hostName, identityID));
+  }
+
+  /**
+   * Handles GET: /clusters/{clusterID}/hosts/{hostID}/kerberos_identities
+   * Get all Kerberos identities for a host.
+   *
+   * @param headers http headers
+   * @param ui      uri info
+   * @return Kerberos identity collection resource representation
+   */
+  @GET
+  @Produces("text/plain")
+  public Response getKerberosIdentities(String body, @Context HttpHeaders headers, @Context UriInfo ui, @QueryParam("format") String format) {
+    return getKerberosIdentity(body, headers, ui, null, format);
+  }
+
+  /**
+   * Create a kerberos identity resource instance.
+   *
+   * @param clusterName cluster name
+   * @param hostName    host name
+   * @param identityId  Kerberos identity id
+   * @return a Kerberos identity resource instance
+   */
+  ResourceInstance createResource(String clusterName, String hostName, String identityId) {
+    Map<Resource.Type, String> mapIds = new HashMap<Resource.Type, String>();
+    mapIds.put(Resource.Type.Cluster, clusterName);
+    mapIds.put(Resource.Type.Host, hostName);
+    mapIds.put(Resource.Type.HostKerberosIdentity, identityId);
+
+    return createResource(Resource.Type.HostKerberosIdentity, mapIds);
+  }
+
+}

+ 11 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/HostService.java

@@ -206,6 +206,17 @@ public class HostService extends BaseService {
     return new HostComponentService(m_clusterName, hostName);
   }
 
+  /**
+   * Get the kerberos_identities sub-resource.
+   *
+   * @param hostName host name
+   * @return the kerberos_identities service
+   */
+  @Path("{hostName}/kerberos_identities")
+  public HostKerberosIdentityService getHostKerberosIdentityHandler(@PathParam("hostName") String hostName) {
+    return new HostKerberosIdentityService(m_clusterName, hostName);
+  }
+
   /**
    * Get the alerts sub-resource.
    *

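Taken together, the ClusterService and HostService wiring above exposes the new identities as sub-resources of a cluster and of a host. For illustration only, with hypothetical cluster and host names and assuming Ambari's usual /api/v1 prefix, the identities could be requested as JSON or, via the format query parameter, as CSV:

GET /api/v1/clusters/c1/kerberos_identities
GET /api/v1/clusters/c1/hosts/host1.example.com/kerberos_identities?format=csv

format=csv selects the HostKerberosIdentityCsvRenderer through HostKerberosIdentityResourceDefinition.getRenderer() and the text/csv media type registered in BaseService, which in turn selects the CsvSerializer.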
+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/api/services/ResultPostProcessorImpl.java

@@ -89,7 +89,7 @@ public class ResultPostProcessorImpl implements ResultPostProcessor {
       for (ResourceDefinition.PostProcessor processor : listProcessors) {
         processor.process(m_request, node, href);
       }
-      href = node.getProperty("href");
+      href = node.getStringProperty("href");
       int i = href.indexOf('?');
       if (i != -1) {
         try {
@@ -99,7 +99,7 @@ public class ResultPostProcessorImpl implements ResultPostProcessor {
         }
       }
     } else {
-      String isItemsCollection = node.getProperty("isCollection");
+      String isItemsCollection = node.getStringProperty("isCollection");
       if (node.getName() == null && "true".equals(isItemsCollection)) {
         node.setName("items");
         node.setProperty("href", href);

+ 224 - 0
ambari-server/src/main/java/org/apache/ambari/server/api/services/serializers/CsvSerializer.java

@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.serializers;
+
+import org.apache.ambari.server.api.services.Result;
+import org.apache.ambari.server.api.services.ResultStatus;
+import org.apache.ambari.server.api.util.TreeNode;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * CSV serializer used to generate a CSV-formatted document from a result.
+ */
+public class CsvSerializer implements ResultSerializer {
+  /**
+   * Property name for the CsvSerializer-specific column map where the value of this property
+   * contains a map of resource property names to header descriptive names.
+   * <p/>
+   * If not specified, no header record will be serialized.
+   */
+  public static final String PROPERTY_COLUMN_MAP = "csv_column_map";
+
+  /**
+   * Property name for the CsvSerializer-specific column order where the value of this property
+   * contains a list of resource property names in the order to export.
+   * <p/>
+   * If not specified, the order will be taken from key order in the csv_column_map property (if
+   * available) or from the "natural" order of the properties in the resource.
+   */
+  public static final String PROPERTY_COLUMN_ORDER = "csv_column_order";
+
+  /**
+   * Serialize the result into a CSV-formatted text document.
+   * <p/>
+   * It is expected that the result set is a collection of flat resources - no sub-resources will be
+   * included in the output.  The root of the tree structure may have a column map (csv_column_map)
+   * and a column order (csv_column_order) property set to indicate the header record and ordering
+   * of the columns.
+   * <p/>
+   * The csv_column_map is a map of resource property names to header descriptive names.  If not
+   * specified, a header record will not be serialized.
+   * <p/>
+   * The csv_column_order is a list of resource property names declaring the order of the columns.
+   * If not specified, the order will be taken from the key order of csv_column_map or the "natural"
+   * ordering of the resource property names, both of which may be unpredictable.
+   *
+   * @param result internal result
+   * @return a String containing the CSV-formatted document
+   */
+  @Override
+  public Object serialize(Result result) {
+    if (result.getStatus().isErrorState()) {
+      return serializeError(result.getStatus());
+    } else {
+
+      try {
+        // A StringBuffer to store the CSV-formatted document while building it.  It may be
+        // necessary to use file-based storage if the data set is expected to be really large.
+        StringBuffer buffer = new StringBuffer();
+
+        TreeNode<Resource> root = result.getResultTree();
+
+        if (root != null) {
+          CSVPrinter csvPrinter = new CSVPrinter(buffer, CSVFormat.DEFAULT);
+
+          // TODO: recursively handle tree structure, for now only handle single level of detail
+          if ("true".equalsIgnoreCase(root.getStringProperty("isCollection"))) {
+            List<String> fieldNameOrder = processHeader(csvPrinter, root);
+
+            Collection<TreeNode<Resource>> children = root.getChildren();
+            if (children != null) {
+              // Iterate over the child nodes of the collection and add each as a new record in the
+              // CSV document.
+              for (TreeNode<Resource> child : children) {
+                processRecord(csvPrinter, child, fieldNameOrder);
+              }
+            }
+          }
+        }
+
+        return buffer.toString();
+      } catch (IOException e) {
+        //todo: exception handling.  Create ResultStatus 500 and call serializeError
+        throw new RuntimeException("Unable to serialize to csv: " + e, e);
+      }
+    }
+  }
+
+  @Override
+  public Object serializeError(ResultStatus error) {
+    try {
+      StringBuffer buffer = new StringBuffer();
+      CSVPrinter csvPrinter = new CSVPrinter(buffer, CSVFormat.DEFAULT);
+
+      csvPrinter.printRecord(Arrays.asList("status", "message"));
+      csvPrinter.printRecord(Arrays.asList(error.getStatus().getStatus(), error.getMessage()));
+
+      return buffer.toString();
+    } catch (IOException e) {
+      //todo: exception handling.  Create ResultStatus 500 and call serializeError
+      throw new RuntimeException("Unable to serialize to csv: " + e, e);
+    }
+  }
+
+  /**
+   * Generate a CSV record by processing the resource embedded in the specified node.  The order of
+   * the fields is set as specified.
+   *
+   * @param csvPrinter     the CSVPrinter used to create the record
+   * @param node           the relevant node in the collection
+   * @param fieldNameOrder a list of field names indicating order
+   * @throws IOException if an error occurs creating the CSV record
+   */
+  private void processRecord(CSVPrinter csvPrinter, TreeNode<Resource> node, List<String> fieldNameOrder)
+      throws IOException {
+
+    if (node != null) {
+      Resource recordResource = node.getObject();
+      if (recordResource != null) {
+        List<Object> values = new ArrayList<Object>();
+
+        if (fieldNameOrder != null) {
+          for (String fieldName : fieldNameOrder) {
+            values.add(recordResource.getPropertyValue(fieldName));
+          }
+        } else {
+          Map<String, Map<String, Object>> properties = recordResource.getPropertiesMap();
+          if (properties != null) {
+
+            for (Map.Entry<String, Map<String, Object>> outer : properties.entrySet()) {
+              Map<String, Object> innerProperties = outer.getValue();
+
+              if (innerProperties != null) {
+                for (Map.Entry<String, Object> inner : innerProperties.entrySet()) {
+                  values.add(inner.getValue());
+                }
+              }
+            }
+          }
+        }
+
+        if (!values.isEmpty()) {
+          csvPrinter.printRecord(values);
+        }
+      }
+    }
+  }
+
+  /**
+   * Optionally generate the CSV header record and establish the field order by processing the
+   * csv_column_map and csv_column_order node properties.
+   *
+   * @param csvPrinter the CSVPrinter used to create the record
+   * @param node       a node containing header and ordering information
+   * @return a list indicating the field order for the CSV records
+   * @throws IOException if an error occurs creating the CSV header
+   */
+  private List<String> processHeader(CSVPrinter csvPrinter, TreeNode<Resource> node) throws IOException {
+    Map<String, String> header;
+    List<String> fieldNameOrder;
+    Object object;
+
+    // Get the explicitly set header property for the current tree node. This may be null if no
+    // header needs to be written out. The header map is expected to be a map of field names to
+    // descriptive header values.
+    object = node.getProperty(PROPERTY_COLUMN_MAP);
+    if (object instanceof Map) {
+      header = (Map<String, String>) object;
+    } else {
+      header = null;
+    }
+
+    // Determine the field name order.  If explicitly set, use it, else grab it from the header map
+    // (if available).
+    object = node.getProperty(PROPERTY_COLUMN_ORDER);
+    if (object instanceof List) {
+      // Use the explicitly set ordering
+      fieldNameOrder = (List<String>) object;
+    } else if (header != null) {
+      // Use the ordering specified by the map.
+      fieldNameOrder = new ArrayList<String>(header.keySet());
+    } else {
+      fieldNameOrder = null;
+    }
+
+    if (header != null) {
+      // build the header record
+      List<String> headerNames = new ArrayList<String>();
+      for (String fieldName : fieldNameOrder) {
+        headerNames.add(header.get(fieldName));
+      }
+
+      // write out the header...
+      csvPrinter.printRecord(headerNames);
+    }
+
+    return fieldNameOrder;
+  }
+}

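A minimal usage sketch of the serializer above, assuming a Result whose root tree node holds a flat collection of KerberosIdentity resources (the property names mirror those set by HostKerberosIdentityCsvRenderer; the wrapper class and method names are hypothetical):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.ambari.server.api.services.Result;
import org.apache.ambari.server.api.services.serializers.CsvSerializer;
import org.apache.ambari.server.api.util.TreeNode;
import org.apache.ambari.server.controller.spi.Resource;

public class CsvUsageSketch {
  public static String toCsv(Result result) {
    TreeNode<Resource> root = result.getResultTree();
    root.setProperty("isCollection", "true");  // CsvSerializer only emits records for a collection root

    // Map of resource property names to header labels; its key order doubles as the column order below.
    Map<String, String> columnMap = new LinkedHashMap<String, String>();
    columnMap.put("KerberosIdentity/host_name", "host");
    columnMap.put("KerberosIdentity/principal_name", "principal name");

    root.setProperty(CsvSerializer.PROPERTY_COLUMN_MAP, columnMap);
    root.setProperty(CsvSerializer.PROPERTY_COLUMN_ORDER, new ArrayList<String>(columnMap.keySet()));

    // Returns the header row followed by one record per child node.
    return (String) new CsvSerializer().serialize(result);
  }
}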
+ 2 - 2
ambari-server/src/main/java/org/apache/ambari/server/api/services/serializers/JsonSerializer.java

@@ -214,14 +214,14 @@ public class JsonSerializer implements ResultSerializer {
   }
 
   private void writeHref(TreeNode<Resource> node) throws IOException {
-    String hrefProp = node.getProperty("href");
+    String hrefProp = node.getStringProperty("href");
     if (hrefProp != null) {
       m_generator.writeStringField("href", hrefProp);
     }
   }
 
   private void writeItemCount(TreeNode<Resource> node) throws IOException {
-    String countProp = node.getProperty("count");
+    String countProp = node.getStringProperty("count");
     if (countProp != null) {
       m_generator.writeStringField("itemTotal", countProp);
       // Write once

+ 10 - 2
ambari-server/src/main/java/org/apache/ambari/server/api/util/TreeNode.java

@@ -98,7 +98,7 @@ public interface TreeNode<T> {
    * @param name  the name of the property
    * @param value the value of the property
    */
-  public void setProperty(String name, String value);
+  public void setProperty(String name, Object value);
 
   /**
    * Get the specified node property.
@@ -106,7 +106,15 @@ public interface TreeNode<T> {
    * @param name property name
    * @return the requested property value or null
    */
-  public String getProperty(String name);
+  public Object getProperty(String name);
+
+  /**
+   * Get the specified node property as a String.
+   *
+   * @param name property name
+   * @return the requested property value (as a String) or null
+   */
+  public String getStringProperty(String name);
 
   /**
    * Remove a property from the node.

+ 10 - 4
ambari-server/src/main/java/org/apache/ambari/server/api/util/TreeNodeImpl.java

@@ -50,7 +50,7 @@ public class TreeNodeImpl<T> implements TreeNode<T> {
   /**
    * properties
    */
-  private Map<String, String> m_mapNodeProps;
+  private Map<String, Object> m_mapNodeProps;
 
   /**
    * Constructor.
@@ -117,18 +117,24 @@ public class TreeNodeImpl<T> implements TreeNode<T> {
   }
 
   @Override
-  public void setProperty(String name, String value) {
+  public void setProperty(String name, Object value) {
     if (m_mapNodeProps == null) {
-      m_mapNodeProps = new LinkedHashMap<String, String>();
+      m_mapNodeProps = new LinkedHashMap<String, Object>();
     }
     m_mapNodeProps.put(name, value);
   }
 
   @Override
-  public String getProperty(String name) {
+  public Object getProperty(String name) {
     return m_mapNodeProps == null ? null : m_mapNodeProps.get(name);
   }
 
+  @Override
+  public String getStringProperty(String name) {
+    Object value = getProperty(name);
+    return value == null ? null : value.toString();
+  }
+
   @Override
   public void removeProperty(String name) {
     if (m_mapNodeProps != null) {

+ 2 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java

@@ -61,6 +61,7 @@ import org.apache.ambari.server.configuration.Configuration.ConnectionPoolType;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.controller.internal.ComponentResourceProvider;
 import org.apache.ambari.server.controller.internal.HostComponentResourceProvider;
+import org.apache.ambari.server.controller.internal.HostKerberosIdentityResourceProvider;
 import org.apache.ambari.server.controller.internal.HostResourceProvider;
 import org.apache.ambari.server.controller.internal.MemberResourceProvider;
 import org.apache.ambari.server.controller.internal.RepositoryVersionResourceProvider;
@@ -389,6 +390,7 @@ public class ControllerModule extends AbstractModule {
         .implement(ResourceProvider.class, Names.named("component"), ComponentResourceProvider.class)
         .implement(ResourceProvider.class, Names.named("member"), MemberResourceProvider.class)
         .implement(ResourceProvider.class, Names.named("repositoryVersion"), RepositoryVersionResourceProvider.class)
+        .implement(ResourceProvider.class, Names.named("hostKerberosIdentity"), HostKerberosIdentityResourceProvider.class)
         .build(ResourceProviderFactory.class));
 
     install(new FactoryModuleBuilder().implement(

+ 167 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java

@@ -1998,6 +1998,173 @@ public class KerberosHelper {
     }
   }
 
+  /**
+   * Returns the active identities for the named cluster.  Results are filtered by host, service,
+   * and/or component; and grouped by host.
+   * <p/>
+   * The cluster name is mandatory; however the active identities may be filtered by one or more of
+   * host, service, or component. A <code>null</code> value for any of these filters indicates no
+   * filter for that parameter.
+   * <p/>
+   * The return values are grouped by host and optionally <code>_HOST</code> in principals will be
+   * replaced with the relevant hostname if specified to do so.
+   *
+   * @param clusterName      the name of the relevant cluster (mandatory)
+   * @param hostName         the name of a host for which to find results, null indicates all hosts
+   * @param serviceName      the name of a service for which to find results, null indicates all
+   *                         services
+   * @param componentName    the name of a component for which to find results, null indicates all
+   *                         components
+   * @param replaceHostNames if true, _HOST in principals will be replaced with the relevant host
+   *                         name
+   * @return a map of host names to kerberos identities
+   * @throws AmbariException if an error occurs processing the cluster's active identities
+   */
+  public Map<String, Collection<KerberosIdentityDescriptor>> getActiveIdentities(String clusterName,
+                                                                                 String hostName,
+                                                                                 String serviceName,
+                                                                                 String componentName,
+                                                                                 boolean replaceHostNames)
+      throws AmbariException {
+
+    if ((clusterName == null) || clusterName.isEmpty()) {
+      throw new IllegalArgumentException("Invalid argument, cluster name is required");
+    }
+
+    Cluster cluster = clusters.getCluster(clusterName);
+
+    if (cluster == null) {
+      throw new AmbariException(String.format("The cluster object for the cluster name %s is not available", clusterName));
+    }
+
+    Map<String, Collection<KerberosIdentityDescriptor>> activeIdentities = new HashMap<String, Collection<KerberosIdentityDescriptor>>();
+
+    if (isClusterKerberosEnabled(cluster)) {
+      Collection<String> hosts;
+
+      if (hostName == null) {
+        Map<String, Host> hostMap = clusters.getHostsForCluster(clusterName);
+        if (hostMap == null) {
+          hosts = null;
+        } else {
+          hosts = hostMap.keySet();
+        }
+      } else {
+        hosts = Collections.singleton(hostName);
+      }
+
+      if ((hosts != null) && !hosts.isEmpty()) {
+        KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster);
+
+        if (kerberosDescriptor != null) {
+          Map<String, String> kerberosDescriptorProperties = kerberosDescriptor.getProperties();
+
+          for (String hostname : hosts) {
+            Map<String, KerberosIdentityDescriptor> hostActiveIdentities = new HashMap<String, KerberosIdentityDescriptor>();
+            List<KerberosIdentityDescriptor> identities = getActiveIdentities(cluster, hostname, serviceName, componentName, kerberosDescriptor);
+
+            if (!identities.isEmpty()) {
+              // Calculate the current host-specific configurations. These will be used to replace
+              // variables within the Kerberos descriptor data
+              Map<String, Map<String, String>> configurations = calculateConfigurations(cluster, hostname, kerberosDescriptorProperties);
+
+              for (KerberosIdentityDescriptor identity : identities) {
+                KerberosPrincipalDescriptor principalDescriptor = identity.getPrincipalDescriptor();
+                String principal = null;
+
+                if (principalDescriptor != null) {
+                  principal = KerberosDescriptor.replaceVariables(principalDescriptor.getValue(), configurations);
+                }
+
+                if (principal != null) {
+                  if (replaceHostNames) {
+                    principal = principal.replace("_HOST", hostname);
+                  }
+
+                  if (!hostActiveIdentities.containsKey(principal)) {
+                    KerberosPrincipalDescriptor resolvedPrincipalDescriptor =
+                        new KerberosPrincipalDescriptor(principal,
+                            principalDescriptor.getType(),
+                            KerberosDescriptor.replaceVariables(principalDescriptor.getConfiguration(), configurations),
+                            KerberosDescriptor.replaceVariables(principalDescriptor.getLocalUsername(), configurations));
+
+                    KerberosKeytabDescriptor resolvedKeytabDescriptor;
+
+                    KerberosKeytabDescriptor keytabDescriptor = identity.getKeytabDescriptor();
+                    if (keytabDescriptor == null) {
+                      resolvedKeytabDescriptor = null;
+                    } else {
+                      resolvedKeytabDescriptor =
+                          new KerberosKeytabDescriptor(
+                              KerberosDescriptor.replaceVariables(keytabDescriptor.getFile(), configurations),
+                              KerberosDescriptor.replaceVariables(keytabDescriptor.getOwnerName(), configurations),
+                              KerberosDescriptor.replaceVariables(keytabDescriptor.getOwnerAccess(), configurations),
+                              KerberosDescriptor.replaceVariables(keytabDescriptor.getGroupName(), configurations),
+                              KerberosDescriptor.replaceVariables(keytabDescriptor.getGroupAccess(), configurations),
+                              KerberosDescriptor.replaceVariables(keytabDescriptor.getConfiguration(), configurations),
+                              keytabDescriptor.isCachable());
+                    }
+
+                    hostActiveIdentities.put(principal, new KerberosIdentityDescriptor(
+                        identity.getName(),
+                        resolvedPrincipalDescriptor,
+                        resolvedKeytabDescriptor));
+                  }
+                }
+              }
+            }
+
+            activeIdentities.put(hostname, hostActiveIdentities.values());
+          }
+        }
+      }
+    }
+
+    return activeIdentities;
+  }
+
+  private List<KerberosIdentityDescriptor> getActiveIdentities(Cluster cluster,
+                                                               String hostname,
+                                                               String serviceName,
+                                                               String componentName,
+                                                               KerberosDescriptor kerberosDescriptor)
+      throws AmbariException {
+
+    List<KerberosIdentityDescriptor> identities = new ArrayList<KerberosIdentityDescriptor>();
+
+    List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(hostname);
+
+    if(serviceComponentHosts != null) {
+      for (ServiceComponentHost serviceComponentHost : serviceComponentHosts) {
+        String schServiceName = serviceComponentHost.getServiceName();
+        String schComponentName = serviceComponentHost.getServiceComponentName();
+
+        if (((serviceName == null) || serviceName.equals(schServiceName)) &&
+            ((componentName == null) || componentName.equals(schComponentName))) {
+
+          KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(schServiceName);
+
+          if (serviceDescriptor != null) {
+            List<KerberosIdentityDescriptor> serviceIdentities = serviceDescriptor.getIdentities(true);
+            if (serviceIdentities != null) {
+              identities.addAll(serviceIdentities);
+            }
+
+            KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(schComponentName);
+            if (componentDescriptor != null) {
+              List<KerberosIdentityDescriptor> componentIdentities = componentDescriptor.getIdentities(true);
+              if (componentIdentities != null) {
+                identities.addAll(componentIdentities);
+              }
+            }
+          }
+        }
+      }
+    }
+
+    return identities;
+  }
+
   /**
    * A enumeration of the supported custom operations
    */

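A minimal sketch of how a caller might consume the new getActiveIdentities() method, with hypothetical cluster, service, and component names; results are keyed by host name, and because replaceHostNames is true the returned principals already have _HOST resolved:

import java.util.Collection;
import java.util.Map;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;

public class ActiveIdentitiesSketch {
  // Print the active NAMENODE principals of cluster "c1", grouped by host.
  static void printNameNodePrincipals(KerberosHelper kerberosHelper) throws AmbariException {
    Map<String, Collection<KerberosIdentityDescriptor>> byHost =
        kerberosHelper.getActiveIdentities("c1", null, "HDFS", "NAMENODE", true);

    for (Map.Entry<String, Collection<KerberosIdentityDescriptor>> entry : byHost.entrySet()) {
      for (KerberosIdentityDescriptor identity : entry.getValue()) {
        KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
        if (principal != null) {
          System.out.println(entry.getKey() + ": " + principal.getValue());
        }
      }
    }
  }
}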
+ 3 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java

@@ -53,6 +53,9 @@ public interface ResourceProviderFactory {
       Map<Type, String> keyPropertyIds,
       AmbariManagementController managementController);
 
+  @Named("hostKerberosIdentity")
+  ResourceProvider getHostKerberosIdentityResourceProvider(AmbariManagementController managementController);
+
   @Named("repositoryVersion")
   ResourceProvider getRepositoryVersionResourceProvider();
 }

+ 2 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java

@@ -167,6 +167,8 @@ public abstract class AbstractControllerResourceProvider extends AbstractResourc
         return new WidgetLayoutResourceProvider(managementController);
       case Widget:
         return new WidgetResourceProvider(managementController);
+      case HostKerberosIdentity:
+        return resourceProviderFactory.getHostKerberosIdentityResourceProvider(managementController);
 
       default:
         throw new IllegalArgumentException("Unknown type " + type);

+ 243 - 0
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostKerberosIdentityResourceProvider.java

@@ -0,0 +1,243 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import com.google.inject.Inject;
+import com.google.inject.assistedinject.Assisted;
+import com.google.inject.assistedinject.AssistedInject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.NoSuchResourceException;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+
+import java.text.DecimalFormat;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Read-only resource provider for Kerberos identity resources.
+ */
+public class HostKerberosIdentityResourceProvider extends ReadOnlyResourceProvider {
+
+  protected static final String KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID = "KerberosIdentity/cluster_name";
+  protected static final String KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID = "KerberosIdentity/host_name";
+  protected static final String KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID = "KerberosIdentity/description";
+  protected static final String KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID = "KerberosIdentity/principal_name";
+  protected static final String KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID = "KerberosIdentity/principal_type";
+  protected static final String KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID = "KerberosIdentity/principal_local_username";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID = "KerberosIdentity/keytab_file_path";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID = "KerberosIdentity/keytab_file_owner";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID = "KerberosIdentity/keytab_file_owner_access";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID = "KerberosIdentity/keytab_file_group";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID = "KerberosIdentity/keytab_file_group_access";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID = "KerberosIdentity/keytab_file_mode";
+  protected static final String KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID = "KerberosIdentity/keytab_file_installed";
+
+  protected static final Map<Resource.Type, String> PK_PROPERTY_MAP = Collections.unmodifiableMap(
+      new HashMap<Resource.Type, String>() {{
+        put(Resource.Type.Cluster, KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID);
+        put(Resource.Type.Host, KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID);
+        put(Resource.Type.HostKerberosIdentity, KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID);
+      }}
+  );
+
+  protected static final Set<String> PK_PROPERTY_IDS = Collections.unmodifiableSet(
+      new HashSet<String>(PK_PROPERTY_MAP.values())
+  );
+
+  protected static final Set<String> PROPERTY_IDS = Collections.unmodifiableSet(
+      new HashSet<String>() {{
+        add(KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID);
+        add(KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID);
+      }}
+  );
+
+  @Inject
+  private KerberosHelper kerberosHelper;
+
+  /**
+   * KerberosPrincipalHostDAO used to get Kerberos principal details
+   */
+  @Inject
+  private KerberosPrincipalHostDAO kerberosPrincipalHostDAO;
+
+  /**
+   * KerberosPrincipalDAO used to get Kerberos principal details
+   */
+  @Inject
+  private KerberosPrincipalDAO kerberosPrincipalDAO;
+
+  /**
+   * HostDAO used to translate host names to host ids
+   */
+  @Inject
+  private HostDAO hostDAO;
+
+  /**
+   * Create a new resource provider for the given management controller.
+   *
+   * @param managementController the management controller
+   */
+  @AssistedInject
+  HostKerberosIdentityResourceProvider(@Assisted AmbariManagementController managementController) {
+    super(PROPERTY_IDS, PK_PROPERTY_MAP, managementController);
+  }
+
+
+  @Override
+  public Set<Resource> getResources(Request request, Predicate predicate)
+      throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
+    return getResources(new GetResourcesCommand(getPropertyMaps(predicate), getRequestPropertyIds(request, predicate)));
+  }
+
+  @Override
+  protected Set<String> getPKPropertyIds() {
+    return PK_PROPERTY_IDS;
+  }
+
+  /**
+   * Inner class to implement the "get resource" command.
+   */
+  private class GetResourcesCommand implements Command<Set<Resource>> {
+    private final Set<Map<String, Object>> propertyMaps;
+    private final Set<String> requestPropertyIds;
+
+    public GetResourcesCommand(Set<Map<String, Object>> propertyMaps, Set<String> requestPropertyIds) {
+      this.propertyMaps = propertyMaps;
+      this.requestPropertyIds = requestPropertyIds;
+    }
+
+    @Override
+    public Set<Resource> invoke() throws AmbariException {
+      Set<Resource> resources = new HashSet<Resource>();
+
+      for (Map<String, Object> propertyMap : propertyMaps) {
+        String clusterName = (String) propertyMap.get(KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID);
+        String hostName = (String) propertyMap.get(KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID);
+
+        // Retrieve the active identities for the cluster filtered and grouped by hostname
+        Map<String, Collection<KerberosIdentityDescriptor>> hostDescriptors =
+            kerberosHelper.getActiveIdentities(clusterName, hostName, null, null, true);
+
+        if (hostDescriptors != null) {
+          for (Map.Entry<String, Collection<KerberosIdentityDescriptor>> entry : hostDescriptors.entrySet()) {
+            Collection<KerberosIdentityDescriptor> descriptors = entry.getValue();
+
+            if (descriptors != null) {
+              String currentHostName = entry.getKey();
+              HostEntity host = hostDAO.findByName(currentHostName);
+              Long hostId = (host == null) ? null : host.getHostId();
+
+              for (KerberosIdentityDescriptor descriptor : descriptors) {
+                KerberosPrincipalDescriptor principalDescriptor = descriptor.getPrincipalDescriptor();
+                if (principalDescriptor != null) {
+                  String principal = principalDescriptor.getValue();
+
+                  if ((principal != null) && !principal.isEmpty()) {
+                    Resource resource = new ResourceImpl(Resource.Type.HostKerberosIdentity);
+
+                    setResourceProperty(resource, KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID, clusterName, requestPropertyIds);
+                    setResourceProperty(resource, KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID, currentHostName, requestPropertyIds);
+
+                    setResourceProperty(resource, KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID, principal, requestPropertyIds);
+                    setResourceProperty(resource, KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID, principalDescriptor.getType(), requestPropertyIds);
+                    setResourceProperty(resource, KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID, principalDescriptor.getLocalUsername(), requestPropertyIds);
+
+                    String installedStatus;
+                    if ((hostId != null) && kerberosPrincipalDAO.exists(principal)) {
+                      if (kerberosPrincipalHostDAO.exists(principal, hostId)) {
+                        installedStatus = "true";
+                      } else {
+                        installedStatus = "false";
+                      }
+                    } else {
+                      installedStatus = "unknown";
+                    }
+
+                    setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID, installedStatus, requestPropertyIds);
+
+                    KerberosKeytabDescriptor keytabDescriptor = descriptor.getKeytabDescriptor();
+                    if (keytabDescriptor != null) {
+                      String ownerAccess = keytabDescriptor.getOwnerAccess();
+                      String groupAccess = keytabDescriptor.getGroupAccess();
+                      int mode = 0;
+
+                      // Create the file access mode using *nix chmod values.
+                      if ("rw".equals(ownerAccess)) {
+                        mode += 600;
+                      } else if ("r".equals(ownerAccess)) {
+                        mode += 400;
+                      }
+
+                      if ("rw".equals(groupAccess)) {
+                        mode += 60;
+                      } else if ("r".equals(groupAccess)) {
+                        mode += 40;
+                      }
+
+                      setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID, keytabDescriptor.getFile(), requestPropertyIds);
+                      setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID, keytabDescriptor.getOwnerName(), requestPropertyIds);
+                      setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID, ownerAccess, requestPropertyIds);
+                      setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID, keytabDescriptor.getGroupName(), requestPropertyIds);
+                      setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID, groupAccess, requestPropertyIds);
+                      setResourceProperty(resource, KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID, new DecimalFormat("000").format(mode), requestPropertyIds);
+                    }
+
+                    setResourceProperty(resource, KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID, descriptor.getName(), requestPropertyIds);
+
+                    resources.add(resource);
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+
+      return resources;
+    }
+  }
+}
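
Note (not part of the patch): the keytab file mode above is accumulated as a plain decimal integer whose digits mirror the octal chmod representation, and is then rendered as a three-digit string. A minimal sketch of that accumulation, with assumed access values:

    // Hypothetical values, as they would come from a KerberosKeytabDescriptor
    String ownerAccess = "rw";
    String groupAccess = "r";
    int mode = 0;
    if ("rw".equals(ownerAccess)) { mode += 600; } else if ("r".equals(ownerAccess)) { mode += 400; }
    if ("rw".equals(groupAccess)) { mode += 60; }  else if ("r".equals(groupAccess))  { mode += 40; }
    // DecimalFormat pads to three digits: here 600 + 40 = 640 -> "640"
    String fileMode = new java.text.DecimalFormat("000").format(mode);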

+ 3 - 1
ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java

@@ -143,7 +143,8 @@ public interface Resource {
     Widget,
     WidgetLayout,
     ActiveWidgetLayout,
-    Theme;
+    Theme,
+    HostKerberosIdentity;
 
     /**
      * Get the {@link Type} that corresponds to this InternalType.
@@ -248,6 +249,7 @@ public interface Resource {
     public static final Type Widget = InternalType.Widget.getType();
     public static final Type WidgetLayout = InternalType.WidgetLayout.getType();
     public static final Type ActiveWidgetLayout = InternalType.ActiveWidgetLayout.getType();
+    public static final Type HostKerberosIdentity = InternalType.HostKerberosIdentity.getType();
 
     /**
      * The type name.

+ 13 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptor.java

@@ -85,6 +85,19 @@ public class KerberosIdentityDescriptor extends AbstractKerberosDescriptor {
    */
   private String password = null;
 
+  /**
+   * Creates a new KerberosIdentityDescriptor
+   *
+   * @param name the name of this identity descriptor
+   * @param principal the KerberosPrincipalDescriptor for this identity
+   * @param keytab the KerberosKeytabDescriptor for this identity
+   */
+  public KerberosIdentityDescriptor(String name, KerberosPrincipalDescriptor principal, KerberosKeytabDescriptor keytab) {
+    setName(name);
+    setPrincipalDescriptor(principal);
+    setKeytabDescriptor(keytab);
+  }
+
   /**
    * Creates a new KerberosIdentityDescriptor
    * <p/>

+ 21 - 0
ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java

@@ -154,6 +154,27 @@ public class KerberosKeytabDescriptor extends AbstractKerberosDescriptor {
    */
   private boolean cachable = true;
 
+  /**
+   * Creates a new KerberosKeytabDescriptor
+   *
+   * @param file the path to the keytab file
+   * @param ownerName the local username of the file owner
+   * @param ownerAccess the file access privileges for the file owner ("r", "rw", "")
+   * @param groupName the local group name with privileges to access the file
+   * @param groupAccess the file access privileges for the group ("r", "rw", "")
+   * @param configuration the configuration used to store the keytab file path
+   * @param cachable true if the keytab may be cached by Ambari; otherwise false
+   */
+  public KerberosKeytabDescriptor(String file, String ownerName, String ownerAccess, String groupName,
+                                  String groupAccess, String configuration, boolean cachable) {
+    setName(file);
+    setOwnerName(ownerName);
+    setOwnerAccess(ownerAccess);
+    setGroupName(groupName);
+    setGroupAccess(groupAccess);
+    setConfiguration(configuration);
+    setCachable(cachable);
+  }
+
   /**
    * Creates a new KerberosKeytabDescriptor
    * <p/>

+ 45 - 13
ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java

@@ -73,7 +73,7 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
    * <p/>
    * Expecting either "service" or "user"
    */
-  private KerberosPrincipalType type;
+  private KerberosPrincipalType type = KerberosPrincipalType.SERVICE;
 
   /**
    * A string declaring configuration type and property name indicating the property to be updated
@@ -87,7 +87,7 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
    * <p/>
    * Example: hdfs-site/dfs.namenode.kerberos.principal
    */
-  private String configuration;
+  private String configuration = null;
 
   /**
    * a String indicating the local username related to this principal, or null if no local mapping is
@@ -95,7 +95,24 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
    * <p/>
    * This value may be used in generating auth_to_local configuration settings.
    */
-  private String localUsername;
+  private String localUsername = null;
+
+  /**
+   * Creates a new KerberosPrincipalDescriptor
+   *
+   * @param principal the principal name
+   * @param type the principal type (user, service, etc...)
+   * @param configuration the configuration used to store the principal name
+   * @param localUsername the local username to map to the principal
+   */
+  public KerberosPrincipalDescriptor(String principal, KerberosPrincipalType type, String configuration, String localUsername) {
+    // The name of this KerberosPrincipalDescriptor is the principal name itself; it is not
+    // set automatically by the superclass, so it is set explicitly here.
+    setName(principal);
+    setType((type == null) ? KerberosPrincipalType.SERVICE : type);
+    setConfiguration(configuration);
+    setLocalUsername(localUsername);
+  }
 
   /**
    * Creates a new KerberosPrincipalDescriptor
@@ -107,16 +124,11 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
    * @see org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor
    */
   public KerberosPrincipalDescriptor(Map<?, ?> data) {
-    // The name for this KerberosPrincipalDescriptor is stored in the "value" entry in the map
-    // This is not automatically set by the super classes.
-    setName(getStringValue(data, "value"));
-
-    String type = getStringValue(data, "type");
-    setType((type == null) ? KerberosPrincipalType.SERVICE : KerberosPrincipalType.valueOf(type.toUpperCase()));
-
-    setConfiguration(getStringValue(data, "configuration"));
-
-    setLocalUsername(getStringValue(data, "local_username"));
+    this(getStringValue(data, "value"),
+        getKerberosPrincipalTypeValue(data, "type"),
+        getStringValue(data, "configuration"),
+        getStringValue(data, "local_username")
+    );
   }
 
   /**
@@ -299,4 +311,24 @@ public class KerberosPrincipalDescriptor extends AbstractKerberosDescriptor {
       return false;
     }
   }
+
+  /**
+   * Translates a string value representing a principal type to a KerberosPrincipalType.
+   * <p/>
+   * If no value is supplied for the key, KerberosPrincipalType.SERVICE is assumed. If a value is
+   * supplied but cannot be translated, an IllegalArgumentException is thrown.
+   *
+   * @param map a Map containing the relevant data
+   * @param key a String declaring the item to retrieve
+   * @return a KerberosPrincipalType
+   * @throws IllegalArgumentException if the principal type value is not one of the expected types.
+   */
+  private static KerberosPrincipalType getKerberosPrincipalTypeValue(Map<?, ?> map, String key) {
+    String type = getStringValue(map, key);
+    if ((type == null) || type.isEmpty()) {
+      return KerberosPrincipalType.SERVICE;
+    } else {
+      return KerberosPrincipalType.valueOf(type.toUpperCase());
+    }
+  }
 }

+ 2 - 2
ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java

@@ -193,7 +193,7 @@ public class ClusterBlueprintRendererTest {
     Result blueprintResult = renderer.finalizeResult(result);
 
     TreeNode<Resource> blueprintTree = blueprintResult.getResultTree();
-    assertNull(blueprintTree.getProperty("isCollection"));
+    assertNull(blueprintTree.getStringProperty("isCollection"));
     assertEquals(1, blueprintTree.getChildren().size());
 
     TreeNode<Resource> blueprintNode = blueprintTree.getChildren().iterator().next();
@@ -272,7 +272,7 @@ public class ClusterBlueprintRendererTest {
     Result blueprintResult = renderer.finalizeResult(result);
 
     TreeNode<Resource> blueprintTree = blueprintResult.getResultTree();
-    assertNull(blueprintTree.getProperty("isCollection"));
+    assertNull(blueprintTree.getStringProperty("isCollection"));
     assertEquals(1, blueprintTree.getChildren().size());
 
     TreeNode<Resource> blueprintNode = blueprintTree.getChildren().iterator().next();

+ 2 - 2
ambari-server/src/test/java/org/apache/ambari/server/api/query/render/MinimalRendererTest.java

@@ -348,7 +348,7 @@ public class MinimalRendererTest {
     renderer.finalizeProperties(createPropertyTree(), false);
 
     TreeNode<Resource> resultTree = renderer.finalizeResult(result).getResultTree();
-    assertNull(resultTree.getProperty("isCollection"));
+    assertNull(resultTree.getStringProperty("isCollection"));
     assertEquals(1, resultTree.getChildren().size());
 
     TreeNode<Resource> clusterNode = resultTree.getChildren().iterator().next();
@@ -414,7 +414,7 @@ public class MinimalRendererTest {
     renderer.finalizeProperties(createPropertyTreeWithSubProps(), false);
 
     TreeNode<Resource> resultTree = renderer.finalizeResult(result).getResultTree();
-    assertNull(resultTree.getProperty("isCollection"));
+    assertNull(resultTree.getStringProperty("isCollection"));
     assertEquals(1, resultTree.getChildren().size());
 
     TreeNode<Resource> clusterNode = resultTree.getChildren().iterator().next();

+ 2 - 2
ambari-server/src/test/java/org/apache/ambari/server/api/resources/BaseResourceDefinitionTest.java

@@ -106,7 +106,7 @@ public class BaseResourceDefinitionTest {
     
     processor.process(null, serviceNode, "http://c6401.ambari.apache.org:8080/api/v1/clusters/c1/services");
 
-    String href = serviceNode.getProperty("href");
+    String href = serviceNode.getStringProperty("href");
 
     Assert.assertEquals("http://c6401.ambari.apache.org:8080/api/v1/clusters/c1/services/Service1", href);
 
@@ -121,7 +121,7 @@ public class BaseResourceDefinitionTest {
 
     processor.process(null, configGroupNode, "http://c6401.ambari.apache.org:8080/api/v1/clusters/c1/config_groups");
 
-    href = configGroupNode.getProperty("href");
+    href = configGroupNode.getStringProperty("href");
 
     Assert.assertEquals("http://c6401.ambari.apache.org:8080/api/v1/clusters/c1/config_groups/2", href);
   }

+ 48 - 0
ambari-server/src/test/java/org/apache/ambari/server/api/resources/HostKerberosIdentityResourceDefinitionTest.java

@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.resources;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Set;
+
+/**
+ * HostKerberosIdentityResourceDefinition tests.
+ */
+public class HostKerberosIdentityResourceDefinitionTest {
+  @Test
+  public void testGetPluralName() throws Exception {
+    final HostKerberosIdentityResourceDefinition resourceDefinition = new HostKerberosIdentityResourceDefinition();
+    Assert.assertEquals("kerberos_identities", resourceDefinition.getPluralName());
+  }
+
+  @Test
+  public void testGetSingularName() throws Exception {
+    final HostKerberosIdentityResourceDefinition resourceDefinition = new HostKerberosIdentityResourceDefinition();
+    Assert.assertEquals("kerberos_identity", resourceDefinition.getSingularName());
+  }
+
+  @Test
+  public void testGetSubResourceDefinitions() throws Exception {
+    final HostKerberosIdentityResourceDefinition resourceDefinition = new HostKerberosIdentityResourceDefinition();
+    final Set<SubResourceDefinition> subResourceDefinitions = resourceDefinition.getSubResourceDefinitions();
+    Assert.assertEquals(0, subResourceDefinitions.size());
+  }
+}

+ 2 - 1
ambari-server/src/test/java/org/apache/ambari/server/api/resources/HostResourceDefinitionTest.java

@@ -46,10 +46,11 @@ public class HostResourceDefinitionTest {
     final ResourceDefinition resource = new HostResourceDefinition();
     Set<SubResourceDefinition> subResources = resource.getSubResourceDefinitions();
 
-    assertEquals(3, subResources.size());
+    assertEquals(4, subResources.size());
     assertTrue(includesType(subResources, Resource.Type.HostComponent));
     assertTrue(includesType(subResources, Resource.Type.Alert));
     assertTrue(includesType(subResources, Resource.Type.HostStackVersion));
+    assertTrue(includesType(subResources, Resource.Type.HostKerberosIdentity));
   }
 
   private boolean includesType(Set<SubResourceDefinition> resources, Resource.Type type) {

+ 10 - 0
ambari-server/src/test/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImplTest.java

@@ -47,4 +47,14 @@ public class ResourceInstanceFactoryImplTest {
     assertEquals("artifacts", resourceDefinition.getPluralName());
     assertEquals(Resource.Type.Artifact, resourceDefinition.getType());
   }
+
+  @Test
+  public void testGetHostKerberosIdentityDefinition() {
+    ResourceDefinition resourceDefinition = ResourceInstanceFactoryImpl.getResourceDefinition(
+        Resource.Type.HostKerberosIdentity, null);
+
+    assertEquals("kerberos_identity", resourceDefinition.getSingularName());
+    assertEquals("kerberos_identities", resourceDefinition.getPluralName());
+    assertEquals(Resource.Type.HostKerberosIdentity, resourceDefinition.getType());
+  }
 }

+ 93 - 0
ambari-server/src/test/java/org/apache/ambari/server/api/services/HostKerberosIdentityServiceTest.java

@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.server.api.services;
+
+
+import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.api.services.parsers.RequestBodyParser;
+import org.apache.ambari.server.api.services.serializers.ResultSerializer;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.UriInfo;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Unit tests for HostKerberosIdentityService.
+ */
+public class HostKerberosIdentityServiceTest extends BaseServiceTest {
+
+  public List<ServiceTestInvocation> getTestInvocations() throws Exception {
+    List<ServiceTestInvocation> listInvocations = new ArrayList<ServiceTestInvocation>();
+
+    //getKerberosIdentity
+    HostKerberosIdentityService service = new TestHostKerberosIdentityService("clusterName", "hostName", "identityId");
+    Method m = service.getClass().getMethod("getKerberosIdentity", String.class, HttpHeaders.class, UriInfo.class, String.class, String.class);
+    Object[] args = new Object[] {null, getHttpHeaders(), getUriInfo(), "identityId", null};
+    listInvocations.add(new ServiceTestInvocation(Request.Type.GET, service, m, args, null));
+
+    //getKerberosIdentities
+    service = new TestHostKerberosIdentityService("clusterName", "hostName", null);
+    m = service.getClass().getMethod("getKerberosIdentities", String.class, HttpHeaders.class, UriInfo.class, String.class);
+    args = new Object[] {null, getHttpHeaders(), getUriInfo(), null};
+    listInvocations.add(new ServiceTestInvocation(Request.Type.GET, service, m, args, null));
+
+    return listInvocations;
+  }
+
+  private class TestHostKerberosIdentityService extends HostKerberosIdentityService {
+    private String clusterId;
+    private String hostId;
+    private String identityId;
+
+    private TestHostKerberosIdentityService(String clusterId, String hostId, String identityId) {
+      super(clusterId, hostId);
+      this.clusterId = clusterId;
+      this.hostId = hostId;
+      this.identityId = identityId;
+    }
+
+    @Override
+    ResourceInstance createResource(String clusterId, String hostId, String identityId) {
+      assertEquals(this.clusterId, clusterId);
+      assertEquals(this.hostId, hostId);
+      assertEquals(this.identityId, identityId);
+      return getTestResource();
+    }
+
+    @Override
+    RequestFactory getRequestFactory() {
+      return getTestRequestFactory();
+    }
+
+    @Override
+    protected RequestBodyParser getBodyParser() {
+      return getTestBodyParser();
+    }
+
+    @Override
+    protected ResultSerializer getResultSerializer() {
+      return getTestResultSerializer();
+    }
+  }
+}

+ 258 - 0
ambari-server/src/test/java/org/apache/ambari/server/api/services/serializers/CsvSerializerTest.java

@@ -0,0 +1,258 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services.serializers;
+
+import org.apache.ambari.server.api.services.Result;
+import org.apache.ambari.server.api.services.ResultImpl;
+import org.apache.ambari.server.api.services.ResultStatus;
+import org.apache.ambari.server.api.util.TreeNode;
+import org.apache.ambari.server.controller.internal.ResourceImpl;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.apache.commons.csv.CSVRecord;
+import org.easymock.EasyMockSupport;
+import org.junit.Test;
+
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * CsvSerializer unit tests
+ */
+public class CsvSerializerTest extends EasyMockSupport {
+
+  @Test
+  public void testSerializeResources_NoColumnInfo() throws Exception {
+    Result result = new ResultImpl(true);
+    result.setResultStatus(new ResultStatus(ResultStatus.STATUS.OK));
+    TreeNode<Resource> tree = result.getResultTree();
+
+    List<TreeMap<String, Object>> data = new ArrayList<TreeMap<String, Object>>() {
+      {
+        add(new TreeMap<String, Object>() {
+          {
+            put("property1", "value1a");
+            put("property2", "value2a");
+            put("property3", "value3a");
+            put("property4", "value4a");
+          }
+        });
+        add(new TreeMap<String, Object>() {
+          {
+            put("property1", "value1'b");
+            put("property2", "value2'b");
+            put("property3", "value3'b");
+            put("property4", "value4'b");
+          }
+        });
+        add(new TreeMap<String, Object>() {
+          {
+            put("property1", "value1,c");
+            put("property2", "value2,c");
+            put("property3", "value3,c");
+            put("property4", "value4,c");
+          }
+        });
+      }
+    };
+
+    tree.setName("items");
+    tree.setProperty("isCollection", "true");
+
+    addChildResource(tree, "resource", 0, data.get(0));
+    addChildResource(tree, "resource", 1, data.get(1));
+    addChildResource(tree, "resource", 2, data.get(2));
+
+    replayAll();
+
+    //execute test
+    Object o = new CsvSerializer().serialize(result).toString().replace("\r", "");
+
+    verifyAll();
+
+    assertNotNull(o);
+
+    StringReader reader = new StringReader(o.toString());
+    CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT);
+    List<CSVRecord> records = csvParser.getRecords();
+
+    assertNotNull(records);
+    assertEquals(3, records.size());
+
+    int i = 0;
+    for (CSVRecord record : records) {
+      TreeMap<String, Object> actualData = data.get(i++);
+      assertEquals(actualData.size(), record.size());
+
+      for (String item : record) {
+        assertTrue(actualData.containsValue(item));
+      }
+    }
+
+    csvParser.close();
+  }
+
+  @Test
+  public void testSerializeResources_HeaderInfo() throws Exception {
+    Result result = new ResultImpl(true);
+    result.setResultStatus(new ResultStatus(ResultStatus.STATUS.OK));
+    TreeNode<Resource> tree = result.getResultTree();
+    tree.setName("items");
+    tree.setProperty("isCollection", "true");
+    tree.setProperty(CsvSerializer.PROPERTY_COLUMN_MAP, new TreeMap<String, String>() {{
+      put("propertyD", "Property D");
+      put("propertyC", "Property C");
+      put("propertyB", "Property B");
+      put("propertyA", "Property A");
+    }});
+
+
+    List<Map<String, Object>> data = new ArrayList<Map<String, Object>>() {
+      {
+        add(new HashMap<String, Object>() {
+          {
+            put("propertyD", "value1a");
+            put("propertyC", "value2a");
+            put("propertyB", "value3a");
+            put("propertyA", "value4a");
+          }
+        });
+        add(new HashMap<String, Object>() {
+          {
+            put("propertyD", "value1'b");
+            put("propertyC", "value2'b");
+            put("propertyB", "value3'b");
+            put("propertyA", "value4'b");
+          }
+        });
+        add(new HashMap<String, Object>() {
+          {
+            put("propertyD", "value1,c");
+            put("propertyC", "value2,c");
+            put("propertyB", "value3,c");
+            put("propertyA", "value4,c");
+          }
+        });
+      }
+    };
+
+    addChildResource(tree, "resource", 0, data.get(0));
+    addChildResource(tree, "resource", 1, data.get(1));
+    addChildResource(tree, "resource", 2, data.get(2));
+
+    replayAll();
+
+    //execute test
+    Object o = new CsvSerializer().serialize(result).toString().replace("\r", "");
+
+    verifyAll();
+
+
+    String expected = "Property A,Property B,Property C,Property D\n" +
+        "value4a,value3a,value2a,value1a\n" +
+        "value4'b,value3'b,value2'b,value1'b\n" +
+        "\"value4,c\",\"value3,c\",\"value2,c\",\"value1,c\"\n";
+
+    assertEquals(expected, o);
+
+  }
+
+  @Test
+  public void testSerializeResources_HeaderOrderInfo() throws Exception {
+    Result result = new ResultImpl(true);
+    result.setResultStatus(new ResultStatus(ResultStatus.STATUS.OK));
+    TreeNode<Resource> tree = result.getResultTree();
+    tree.setName("items");
+    tree.setProperty("isCollection", "true");
+    tree.setProperty(CsvSerializer.PROPERTY_COLUMN_MAP, new HashMap<String, String>() {{
+      put("property1", "Property 1");
+      put("property2", "Property 2");
+      put("property3", "Property 3");
+      put("property4", "Property 4");
+    }});
+    tree.setProperty(CsvSerializer.PROPERTY_COLUMN_ORDER, Arrays.asList(
+        "property1",
+        "property2",
+        "property3",
+        "property4"));
+
+    addChildResource(tree, "resource", 0, new HashMap<String, Object>() {
+      {
+        put("property1", "value1a");
+        put("property2", "value2a");
+        put("property3", "value3a");
+        put("property4", "value4a");
+      }
+    });
+    addChildResource(tree, "resource", 1, new HashMap<String, Object>() {
+      {
+        put("property1", "value1'b");
+        put("property2", "value2'b");
+        put("property3", "value3'b");
+        put("property4", "value4'b");
+      }
+    });
+    addChildResource(tree, "resource", 2, new HashMap<String, Object>() {
+      {
+        put("property1", "value1,c");
+        put("property2", "value2,c");
+        put("property3", "value3,c");
+        put("property4", "value4,c");
+      }
+    });
+
+    replayAll();
+
+    //execute test
+    Object o = new CsvSerializer().serialize(result).toString().replace("\r", "");
+
+    String expected = "Property 1,Property 2,Property 3,Property 4\n" +
+        "value1a,value2a,value3a,value4a\n" +
+        "value1'b,value2'b,value3'b,value4'b\n" +
+        "\"value1,c\",\"value2,c\",\"value3,c\",\"value4,c\"\n";
+
+    assertEquals(expected, o);
+
+    verifyAll();
+  }
+
+
+  private void addChildResource(TreeNode<Resource> parent, String name, int index, final Map<String, Object> data) {
+    Resource resource = new ResourceImpl(Resource.Type.Cluster);
+
+    if (data != null) {
+      for (Map.Entry<String, Object> entry : data.entrySet()) {
+        resource.setProperty(entry.getKey(), entry.getValue());
+      }
+    }
+
+    parent.addChild(resource, String.format("%s:%d", name, index));
+  }
+
+}
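
Note (not part of the patch): as the tests above show, CsvSerializer is driven by two optional properties on the root tree node: PROPERTY_COLUMN_MAP supplies the header labels and PROPERTY_COLUMN_ORDER fixes the column order; with neither set, rows are written without a header row in the order of each resource's properties. A minimal sketch with hypothetical property names (assumes the usual java.util imports):

    Result result = new ResultImpl(true);
    result.setResultStatus(new ResultStatus(ResultStatus.STATUS.OK));

    TreeNode<Resource> tree = result.getResultTree();
    tree.setName("items");
    tree.setProperty("isCollection", "true");
    tree.setProperty(CsvSerializer.PROPERTY_COLUMN_MAP,
        Collections.singletonMap("principal", "Principal"));   // column id -> header label
    tree.setProperty(CsvSerializer.PROPERTY_COLUMN_ORDER,
        Collections.singletonList("principal"));               // left-to-right column order

    Resource row = new ResourceImpl(Resource.Type.HostKerberosIdentity);
    row.setProperty("principal", "service1/host1@EXAMPLE.COM");
    tree.addChild(row, "resource:0");

    // Produces a header row ("Principal") followed by one data row
    String csv = new CsvSerializer().serialize(result).toString();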

+ 631 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java

@@ -415,6 +415,409 @@ public class KerberosHelperTest extends EasyMockSupport {
     testDeleteTestIdentity(new KerberosCredential("principal", "password", "keytab"));
   }
 
+  @Test(expected = IllegalArgumentException.class)
+  public void testGetActiveIdentities_MissingCluster() throws Exception {
+    testGetActiveIdentities(null, null, null, null, true);
+  }
+
+  @Test
+  public void testGetActiveIdentities_All() throws Exception {
+    Map<String, Collection<KerberosIdentityDescriptor>> identities = testGetActiveIdentities("c1", null, null, null, true);
+
+    Assert.assertNotNull(identities);
+    Assert.assertEquals(2, identities.size());
+
+    Collection<KerberosIdentityDescriptor> hostIdentities;
+
+    hostIdentities = identities.get("host1");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(3, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity1", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component1/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/component1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/component1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity2", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component2/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service2-site/component2.kerberos.principal");
+          put("principal_local_username", "service2");
+          put("keytab_file", "${keytab_dir}/service2.keytab");
+          put("keytab_owner_name", "service2");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service2-site/component2.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity3", new HashMap<String, Object>() {
+        {
+          put("principal_name", "service1/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/service1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.service.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/service1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+    
+    hostIdentities = identities.get("host2");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(3, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity1", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component1/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/component1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/component1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity2", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component2/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service2-site/component2.kerberos.principal");
+          put("principal_local_username", "service2");
+          put("keytab_file", "${keytab_dir}/service2.keytab");
+          put("keytab_owner_name", "service2");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service2-site/component2.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity3", new HashMap<String, Object>() {
+        {
+          put("principal_name", "service1/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/service1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.service.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/service1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+  }
+
+  @Test
+  public void testGetActiveIdentities_SingleHost() throws Exception {
+    Map<String, Collection<KerberosIdentityDescriptor>> identities = testGetActiveIdentities("c1", "host1", null, null, true);
+
+    Assert.assertNotNull(identities);
+    Assert.assertEquals(1, identities.size());
+
+    Collection<KerberosIdentityDescriptor> hostIdentities;
+
+    hostIdentities = identities.get("host1");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(3, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity1", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component1/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/component1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/component1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity2", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component2/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service2-site/component2.kerberos.principal");
+          put("principal_local_username", "service2");
+          put("keytab_file", "${keytab_dir}/service2.keytab");
+          put("keytab_owner_name", "service2");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service2-site/component2.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity3", new HashMap<String, Object>() {
+        {
+          put("principal_name", "service1/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/service1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.service.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/service1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+  }
+
+  @Test
+  public void testGetActiveIdentities_SingleService() throws Exception {
+    Map<String, Collection<KerberosIdentityDescriptor>> identities = testGetActiveIdentities("c1", null, "SERVICE1", null, true);
+
+    Assert.assertNotNull(identities);
+    Assert.assertEquals(2, identities.size());
+
+    Collection<KerberosIdentityDescriptor> hostIdentities;
+
+    hostIdentities = identities.get("host1");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(2, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity1", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component1/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/component1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/component1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity3", new HashMap<String, Object>() {
+        {
+          put("principal_name", "service1/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/service1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.service.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/service1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+    
+    hostIdentities = identities.get("host2");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(2, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity1", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component1/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/component1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/component1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity3", new HashMap<String, Object>() {
+        {
+          put("principal_name", "service1/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/service1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.service.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/service1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+  }
+
+  @Test
+  public void testGetActiveIdentities_SingleServiceSingleHost() throws Exception {
+    Map<String, Collection<KerberosIdentityDescriptor>> identities = testGetActiveIdentities("c1", "host2", "SERVICE1", null, true);
+
+    Assert.assertNotNull(identities);
+    Assert.assertEquals(1, identities.size());
+
+    Collection<KerberosIdentityDescriptor> hostIdentities;
+
+    hostIdentities = identities.get("host2");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(2, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity1", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component1/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/component1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/component1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+
+      put("identity3", new HashMap<String, Object>() {
+        {
+          put("principal_name", "service1/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service1-site/service1.kerberos.principal");
+          put("principal_local_username", "service1");
+          put("keytab_file", "${keytab_dir}/service1.service.keytab");
+          put("keytab_owner_name", "service1");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service1-site/service1.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+  }
+
+  @Test
+  public void testGetActiveIdentities_SingleComponent() throws Exception {
+    Map<String, Collection<KerberosIdentityDescriptor>> identities = testGetActiveIdentities("c1", null, null, "COMPONENT2", true);
+
+    Assert.assertNotNull(identities);
+    Assert.assertEquals(2, identities.size());
+
+    Collection<KerberosIdentityDescriptor> hostIdentities;
+
+    hostIdentities = identities.get("host1");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(1, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity2", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component2/host1@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service2-site/component2.kerberos.principal");
+          put("principal_local_username", "service2");
+          put("keytab_file", "${keytab_dir}/service2.keytab");
+          put("keytab_owner_name", "service2");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service2-site/component2.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+
+    hostIdentities = identities.get("host2");
+    Assert.assertNotNull(hostIdentities);
+    Assert.assertEquals(1, hostIdentities.size());
+
+    validateIdentities(hostIdentities, new HashMap<String, Map<String, Object>>() {{
+      put("identity2", new HashMap<String, Object>() {
+        {
+          put("principal_name", "component2/host2@EXAMPLE.COM");
+          put("principal_type", KerberosPrincipalType.SERVICE);
+          put("principal_configuration", "service2-site/component2.kerberos.principal");
+          put("principal_local_username", "service2");
+          put("keytab_file", "${keytab_dir}/service2.keytab");
+          put("keytab_owner_name", "service2");
+          put("keytab_owner_access", "rw");
+          put("keytab_group_name", "hadoop");
+          put("keytab_group_access", "");
+          put("keytab_configuration", "service2-site/component2.keytab.file");
+          put("keytab_cachable", false);
+        }
+      });
+    }});
+  }
+
+  private void validateIdentities(Collection<KerberosIdentityDescriptor> identities, HashMap<String, Map<String, Object>> expectedDataMap) {
+
+    Assert.assertEquals(expectedDataMap.size(), identities.size());
+
+    for(KerberosIdentityDescriptor identity: identities) {
+      Map<String, Object> expectedData = expectedDataMap.get(identity.getName());
+
+      Assert.assertNotNull(expectedData);
+
+      KerberosPrincipalDescriptor principal = identity.getPrincipalDescriptor();
+      Assert.assertNotNull(principal);
+      Assert.assertEquals(expectedData.get("principal_name"), principal.getName());
+      Assert.assertEquals(expectedData.get("principal_type"), principal.getType());
+      Assert.assertEquals(expectedData.get("principal_configuration"), principal.getConfiguration());
+      Assert.assertEquals(expectedData.get("principal_local_username"), principal.getLocalUsername());
+
+      KerberosKeytabDescriptor keytab = identity.getKeytabDescriptor();
+      Assert.assertNotNull(keytab);
+      Assert.assertEquals(expectedData.get("keytab_file"), keytab.getFile());
+      Assert.assertEquals(expectedData.get("keytab_owner_name"), keytab.getOwnerName());
+      Assert.assertEquals(expectedData.get("keytab_owner_access"), keytab.getOwnerAccess());
+      Assert.assertEquals(expectedData.get("keytab_group_name"), keytab.getGroupName());
+      Assert.assertEquals(expectedData.get("keytab_group_access"), keytab.getGroupAccess());
+      Assert.assertEquals(expectedData.get("keytab_configuration"), keytab.getConfiguration());
+      Assert.assertEquals(Boolean.TRUE.equals(expectedData.get("keytab_cachable")), keytab.isCachable());
+    }
+  }
+
+
   private void testEnableKerberos(final KerberosCredential kerberosCredential,
                                   boolean getClusterDescriptor,
                                   boolean getStackDescriptor) throws Exception {
@@ -2473,4 +2876,232 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     verifyAll();
   }
+
+  private Map<String, Collection<KerberosIdentityDescriptor>> testGetActiveIdentities(String clusterName, String hostName, String serviceName, String componentName, boolean replaceHostnames) throws Exception {
+
+    KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
+
+    final ServiceComponentHost schKerberosClient1 = createMock(ServiceComponentHost.class);
+    expect(schKerberosClient1.getServiceName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
+    expect(schKerberosClient1.getServiceComponentName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
+
+    final ServiceComponentHost schKerberosClient2 = createMock(ServiceComponentHost.class);
+    expect(schKerberosClient2.getServiceName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
+    expect(schKerberosClient2.getServiceComponentName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
+
+    final ServiceComponentHost sch1a = createMock(ServiceComponentHost.class);
+    expect(sch1a.getServiceName()).andReturn("SERVICE1").anyTimes();
+    expect(sch1a.getServiceComponentName()).andReturn("COMPONENT1").anyTimes();
+
+    final ServiceComponentHost sch1b = createMock(ServiceComponentHost.class);
+    expect(sch1b.getServiceName()).andReturn("SERVICE2").anyTimes();
+    expect(sch1b.getServiceComponentName()).andReturn("COMPONENT2").anyTimes();
+
+    final ServiceComponentHost sch2a = createMock(ServiceComponentHost.class);
+    expect(sch2a.getServiceName()).andReturn("SERVICE1").anyTimes();
+    expect(sch2a.getServiceComponentName()).andReturn("COMPONENT1").anyTimes();
+
+    final ServiceComponentHost sch2b = createMock(ServiceComponentHost.class);
+    expect(sch2b.getServiceName()).andReturn("SERVICE2").anyTimes();
+    expect(sch2b.getServiceComponentName()).andReturn("COMPONENT2").anyTimes();
+
+    final Host host1 = createNiceMock(Host.class);
+    expect(host1.getHostName()).andReturn("host1").anyTimes();
+    expect(host1.getState()).andReturn(HostState.HEALTHY).anyTimes();
+
+    final Host host2 = createNiceMock(Host.class);
+    expect(host2.getHostName()).andReturn("host2").anyTimes();
+    expect(host2.getState()).andReturn(HostState.HEALTHY).anyTimes();
+
+    final ServiceComponent serviceComponentKerberosClient = createMock(ServiceComponent.class);
+    expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
+    expect(serviceComponentKerberosClient.getServiceComponentHosts()).andReturn(Collections.singletonMap("host1", schKerberosClient1)).anyTimes();
+
+    final Service serviceKerberos = createStrictMock(Service.class);
+    expect(serviceKerberos.getName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
+    expect(serviceKerberos.getServiceComponents())
+        .andReturn(Collections.singletonMap(Role.KERBEROS_CLIENT.name(), serviceComponentKerberosClient))
+        .anyTimes();
+
+    final Service service1 = createStrictMock(Service.class);
+    expect(service1.getName()).andReturn("SERVICE1").anyTimes();
+    expect(service1.getServiceComponents())
+        .andReturn(Collections.<String, ServiceComponent>emptyMap())
+        .anyTimes();
+
+    final Service service2 = createStrictMock(Service.class);
+    expect(service2.getName()).andReturn("SERVICE2").anyTimes();
+    expect(service2.getServiceComponents())
+        .andReturn(Collections.<String, ServiceComponent>emptyMap())
+        .anyTimes();
+
+    final Cluster cluster = createMock(Cluster.class);
+    expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
+    expect(cluster.getClusterName()).andReturn(clusterName).anyTimes();
+    expect(cluster.getServiceComponentHosts("host1"))
+        .andReturn(new ArrayList<ServiceComponentHost>() {
+          {
+            add(schKerberosClient1);
+            add(sch1a);
+            add(sch1b);
+          }
+        })
+        .anyTimes();
+    expect(cluster.getServiceComponentHosts("host2"))
+        .andReturn(new ArrayList<ServiceComponentHost>() {
+          {
+            add(schKerberosClient2);
+            add(sch2a);
+            add(sch2b);
+          }
+        })
+        .anyTimes();
+    expect(cluster.getCurrentStackVersion())
+        .andReturn(new StackId("HDP", "2.2"))
+        .anyTimes();
+    expect(cluster.getServices())
+        .andReturn(new HashMap<String, Service>() {
+          {
+            put(Service.Type.KERBEROS.name(), serviceKerberos);
+            put("SERVICE1", service1);
+            put("SERVICE2", service2);
+          }
+        })
+        .anyTimes();
+
+    final Clusters clusters = injector.getInstance(Clusters.class);
+    expect(clusters.getCluster(clusterName)).andReturn(cluster).times(1);
+
+    if(hostName == null) {
+      expect(clusters.getHostsForCluster(clusterName))
+          .andReturn(new HashMap<String, Host>() {
+            {
+              put("host1", host1);
+              put("host2", host2);
+            }
+          })
+          .once();
+    }
+
+    final AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host1"))
+        .andReturn(Collections.<String, Map<String, String>>emptyMap())
+        .anyTimes();
+    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, "host2"))
+        .andReturn(Collections.<String, Map<String, String>>emptyMap())
+        .anyTimes();
+    expect(ambariManagementController.findConfigurationTagsWithOverrides(cluster, null))
+        .andReturn(Collections.<String, Map<String, String>>emptyMap())
+        .anyTimes();
+
+    final ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    expect(configHelper.getEffectiveConfigProperties(anyObject(Cluster.class), anyObject(Map.class)))
+        .andReturn(new HashMap<String, Map<String, String>>() {
+          {
+            put("cluster-env", new HashMap<String, String>() {{
+              put("kerberos_domain", "FOOBAR.COM");
+            }});
+          }
+        })
+        .anyTimes();
+
+    final KerberosPrincipalDescriptor principalDescriptor1 = createMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor1.getValue()).andReturn("component1/_HOST@${realm}").anyTimes();
+    expect(principalDescriptor1.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
+    expect(principalDescriptor1.getConfiguration()).andReturn("service1-site/component1.kerberos.principal").anyTimes();
+    expect(principalDescriptor1.getLocalUsername()).andReturn("service1").anyTimes();
+
+    final KerberosPrincipalDescriptor principalDescriptor2 = createMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor2.getValue()).andReturn("component2/${host}@${realm}").anyTimes();
+    expect(principalDescriptor2.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
+    expect(principalDescriptor2.getConfiguration()).andReturn("service2-site/component2.kerberos.principal").anyTimes();
+    expect(principalDescriptor2.getLocalUsername()).andReturn("service2").anyTimes();
+
+    final KerberosPrincipalDescriptor principalDescriptorService1 = createMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptorService1.getValue()).andReturn("service1/_HOST@${realm}").anyTimes();
+    expect(principalDescriptorService1.getType()).andReturn(KerberosPrincipalType.SERVICE).anyTimes();
+    expect(principalDescriptorService1.getConfiguration()).andReturn("service1-site/service1.kerberos.principal").anyTimes();
+    expect(principalDescriptorService1.getLocalUsername()).andReturn("service1").anyTimes();
+
+    final KerberosKeytabDescriptor keytabDescriptor1 = createMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptor1.getFile()).andReturn("${keytab_dir}/service1.keytab").anyTimes();
+    expect(keytabDescriptor1.getOwnerName()).andReturn("service1").anyTimes();
+    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").anyTimes();
+    expect(keytabDescriptor1.getGroupName()).andReturn("hadoop").anyTimes();
+    expect(keytabDescriptor1.getGroupAccess()).andReturn("").anyTimes();
+    expect(keytabDescriptor1.getConfiguration()).andReturn("service1-site/component1.keytab.file").anyTimes();
+    expect(keytabDescriptor1.isCachable()).andReturn(false).anyTimes();
+
+    final KerberosKeytabDescriptor keytabDescriptor2 = createMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptor2.getFile()).andReturn("${keytab_dir}/service2.keytab").anyTimes();
+    expect(keytabDescriptor2.getOwnerName()).andReturn("service2").anyTimes();
+    expect(keytabDescriptor2.getOwnerAccess()).andReturn("rw").anyTimes();
+    expect(keytabDescriptor2.getGroupName()).andReturn("hadoop").anyTimes();
+    expect(keytabDescriptor2.getGroupAccess()).andReturn("").anyTimes();
+    expect(keytabDescriptor2.getConfiguration()).andReturn("service2-site/component2.keytab.file").anyTimes();
+    expect(keytabDescriptor2.isCachable()).andReturn(false).anyTimes();
+
+    final KerberosKeytabDescriptor keytabDescriptorService1 = createMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptorService1.getFile()).andReturn("${keytab_dir}/service1.service.keytab").anyTimes();
+    expect(keytabDescriptorService1.getOwnerName()).andReturn("service1").anyTimes();
+    expect(keytabDescriptorService1.getOwnerAccess()).andReturn("rw").anyTimes();
+    expect(keytabDescriptorService1.getGroupName()).andReturn("hadoop").anyTimes();
+    expect(keytabDescriptorService1.getGroupAccess()).andReturn("").anyTimes();
+    expect(keytabDescriptorService1.getConfiguration()).andReturn("service1-site/service1.keytab.file").anyTimes();
+    expect(keytabDescriptorService1.isCachable()).andReturn(false).anyTimes();
+
+    final KerberosIdentityDescriptor identityDescriptor1 = createMock(KerberosIdentityDescriptor.class);
+    expect(identityDescriptor1.getName()).andReturn("identity1").anyTimes();
+    expect(identityDescriptor1.getPrincipalDescriptor()).andReturn(principalDescriptor1).anyTimes();
+    expect(identityDescriptor1.getKeytabDescriptor()).andReturn(keytabDescriptor1).anyTimes();
+
+    final KerberosIdentityDescriptor identityDescriptor2 = createMock(KerberosIdentityDescriptor.class);
+    expect(identityDescriptor2.getName()).andReturn("identity2").anyTimes();
+    expect(identityDescriptor2.getPrincipalDescriptor()).andReturn(principalDescriptor2).anyTimes();
+    expect(identityDescriptor2.getKeytabDescriptor()).andReturn(keytabDescriptor2).anyTimes();
+
+    final KerberosIdentityDescriptor identityDescriptorService1 = createMock(KerberosIdentityDescriptor.class);
+    expect(identityDescriptorService1.getName()).andReturn("identity3").anyTimes();
+    expect(identityDescriptorService1.getPrincipalDescriptor()).andReturn(principalDescriptorService1).anyTimes();
+    expect(identityDescriptorService1.getKeytabDescriptor()).andReturn(keytabDescriptorService1).anyTimes();
+
+    final KerberosComponentDescriptor componentDescriptor1 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor1.getIdentities(true)).andReturn(Collections.singletonList(identityDescriptor1)).anyTimes();
+
+    final KerberosComponentDescriptor componentDescriptor2 = createMock(KerberosComponentDescriptor.class);
+    expect(componentDescriptor2.getIdentities(true)).andReturn(Collections.singletonList(identityDescriptor2)).anyTimes();
+
+    final KerberosServiceDescriptor serviceDescriptor1 = createMock(KerberosServiceDescriptor.class);
+    expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).anyTimes();
+    expect(serviceDescriptor1.getIdentities(true)).andReturn(Collections.singletonList(identityDescriptorService1)).anyTimes();
+
+    final KerberosServiceDescriptor serviceDescriptor2 = createMock(KerberosServiceDescriptor.class);
+    expect(serviceDescriptor2.getComponent("COMPONENT2")).andReturn(componentDescriptor2).anyTimes();
+    expect(serviceDescriptor2.getIdentities(true)).andReturn(null).anyTimes();
+
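+    // Cluster-wide descriptor: no KERBEROS service is defined, and SERVICE2 contributes no service-level identities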
+    final KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
+    expect(kerberosDescriptor.getProperties()).andReturn(new HashMap<String, String>(){
+      {
+        put("realm", "EXAMPLE.COM");
+      }
+    }).anyTimes();
+    expect(kerberosDescriptor.getService("KERBEROS")).andReturn(null).anyTimes();
+    expect(kerberosDescriptor.getService("SERVICE1")).andReturn(serviceDescriptor1).anyTimes();
+    expect(kerberosDescriptor.getService("SERVICE2")).andReturn(serviceDescriptor2).anyTimes();
+
+    setupGetDescriptorFromCluster(kerberosDescriptor);
+
+    replayAll();
+
+    // Needed by infrastructure
+    metaInfo.init();
+
+    Map<String, Collection<KerberosIdentityDescriptor>> identities;
+    identities = kerberosHelper.getActiveIdentities(clusterName, hostName, serviceName, componentName, replaceHostnames);
+
+    verifyAll();
+
+    return identities;
+  }
+
 }

+ 362 - 0
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostKerberosIdentityResourceProviderTest.java

@@ -0,0 +1,362 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
+import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalType;
+import org.easymock.EasyMockSupport;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import static org.easymock.EasyMock.expect;
+
+
+/**
+ * Tests for the host Kerberos identity resource provider.
+ */
+public class HostKerberosIdentityResourceProviderTest extends EasyMockSupport {
+  @Test(expected = org.apache.ambari.server.controller.spi.SystemException.class)
+  public void testCreateResources() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+
+    ResourceProvider provider = new HostKerberosIdentityResourceProvider(managementController);
+
+    // Create a property set containing a single empty map.  Its contents shouldn't matter
+    // since HostKerberosIdentityResourceProvider is a read-only provider and is expected to
+    // throw an org.apache.ambari.server.controller.spi.SystemException
+    Set<Map<String, Object>> propertySet = Collections.singleton(Collections.<String, Object>emptyMap());
+
+    Request request = PropertyHelper.getCreateRequest(propertySet, null);
+
+    provider.createResources(request);
+  }
+
+  @Test(expected = org.apache.ambari.server.controller.spi.SystemException.class)
+  public void testUpdateResources() throws Exception {
+
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+
+    Map<String, String> mapRequestProps = new HashMap<String, String>();
+    mapRequestProps.put("context", "Called from a test");
+
+    ResourceProvider provider = new HostKerberosIdentityResourceProvider(managementController);
+
+    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+    properties.put(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
+    properties.put(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID, "Host100");
+    properties.put(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID, "principal@REALM");
+    properties.put(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID, "userA");
+
+    // create the request
+    Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
+
+    Predicate predicate = new PredicateBuilder()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID)
+        .equals("Cluster100")
+        .and()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID)
+        .equals("Host100")
+        .and()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID)
+        .equals("principal@REALM").toPredicate();
+
+    provider.updateResources(request, predicate);
+  }
+
+  @Test(expected = org.apache.ambari.server.controller.spi.SystemException.class)
+  public void testDeleteResources() throws Exception {
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+
+    ResourceProvider provider = new HostKerberosIdentityResourceProvider(managementController);
+
+    AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
+
+    ((ObservableResourceProvider) provider).addObserver(observer);
+
+    Predicate predicate = new PredicateBuilder()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID)
+        .equals("Cluster100")
+        .and()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID)
+        .equals("Host100")
+        .and()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID)
+        .equals("principal@REALM").toPredicate();
+
+    provider.deleteResources(predicate);
+  }
+
+
+  @Test
+  public void testGetResources() throws Exception {
+
+    AmbariManagementController managementController = createMock(AmbariManagementController.class);
+
+    KerberosPrincipalDescriptor principalDescriptor1 = createStrictMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor1.getValue()).andReturn("principal1@EXAMPLE.COM");
+    expect(principalDescriptor1.getType()).andReturn(KerberosPrincipalType.USER).times(1);
+    expect(principalDescriptor1.getLocalUsername()).andReturn("principal1");
+
+    KerberosKeytabDescriptor keytabDescriptor1 = createStrictMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptor1.getOwnerAccess()).andReturn("rw").times(1);
+    expect(keytabDescriptor1.getGroupAccess()).andReturn("r").times(1);
+    expect(keytabDescriptor1.getFile()).andReturn("/etc/security/keytabs/principal1.headless.keytab").times(1);
+    expect(keytabDescriptor1.getOwnerName()).andReturn("principal1").times(1);
+    expect(keytabDescriptor1.getGroupName()).andReturn("principal1").times(1);
+
+    KerberosIdentityDescriptor identity1 = createStrictMock(KerberosIdentityDescriptor.class);
+    expect(identity1.getPrincipalDescriptor()).andReturn(principalDescriptor1).times(1);
+    expect(identity1.getKeytabDescriptor()).andReturn(keytabDescriptor1).times(1);
+    expect(identity1.getName()).andReturn("identity1").times(1);
+
+    KerberosPrincipalDescriptor principalDescriptor2 = createStrictMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor2.getValue()).andReturn("principal2/Host100@EXAMPLE.COM");
+    expect(principalDescriptor2.getType()).andReturn(KerberosPrincipalType.SERVICE).times(1);
+    expect(principalDescriptor2.getLocalUsername()).andReturn("principal2");
+
+    KerberosIdentityDescriptor identity2 = createStrictMock(KerberosIdentityDescriptor.class);
+    expect(identity2.getPrincipalDescriptor()).andReturn(principalDescriptor2).times(1);
+    expect(identity2.getKeytabDescriptor()).andReturn(null).times(1);
+    expect(identity2.getName()).andReturn("identity2").times(1);
+
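+    // identity3 and identity4 have no principal descriptor; they are expected to be skipped by the provider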
+    KerberosIdentityDescriptor identity3 = createStrictMock(KerberosIdentityDescriptor.class);
+    expect(identity3.getPrincipalDescriptor()).andReturn(null).times(1);
+
+    KerberosIdentityDescriptor identity4 = createStrictMock(KerberosIdentityDescriptor.class);
+    expect(identity4.getPrincipalDescriptor()).andReturn(null).times(1);
+
+    KerberosPrincipalDescriptor principalDescriptor5 = createStrictMock(KerberosPrincipalDescriptor.class);
+    expect(principalDescriptor5.getValue()).andReturn("principal5@EXAMPLE.COM");
+    expect(principalDescriptor5.getType()).andReturn(KerberosPrincipalType.USER).times(1);
+    expect(principalDescriptor5.getLocalUsername()).andReturn("principal5");
+
+    KerberosKeytabDescriptor keytabDescriptor5 = createStrictMock(KerberosKeytabDescriptor.class);
+    expect(keytabDescriptor5.getOwnerAccess()).andReturn("r").times(1);
+    expect(keytabDescriptor5.getGroupAccess()).andReturn("r").times(1);
+    expect(keytabDescriptor5.getFile()).andReturn("/etc/security/keytabs/principal5.headless.keytab").times(1);
+    expect(keytabDescriptor5.getOwnerName()).andReturn("principal5").times(1);
+    expect(keytabDescriptor5.getGroupName()).andReturn("hadoop").times(1);
+
+    KerberosIdentityDescriptor identity5 = createStrictMock(KerberosIdentityDescriptor.class);
+    expect(identity5.getPrincipalDescriptor()).andReturn(principalDescriptor5).times(1);
+    expect(identity5.getKeytabDescriptor()).andReturn(keytabDescriptor5).times(1);
+    expect(identity5.getName()).andReturn("identity5").times(1);
+
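+    // The DAO expectations drive the KEYTAB_FILE_INSTALLED values asserted below:
+    //   principal1 exists and is mapped to host 100           -> "true"
+    //   principal2 exists but has no host mapping             -> "false"
+    //   principal5 does not exist, so the host check is skipped -> "unknown"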
+    KerberosPrincipalDAO kerberosPrincipalDAO = createStrictMock(KerberosPrincipalDAO.class);
+    expect(kerberosPrincipalDAO.exists("principal1@EXAMPLE.COM")).andReturn(true).times(1);
+    expect(kerberosPrincipalDAO.exists("principal2/Host100@EXAMPLE.COM")).andReturn(true).times(1);
+    expect(kerberosPrincipalDAO.exists("principal5@EXAMPLE.COM")).andReturn(false).times(1);
+
+    KerberosPrincipalHostDAO kerberosPrincipalHostDAO = createStrictMock(KerberosPrincipalHostDAO.class);
+    expect(kerberosPrincipalHostDAO.exists("principal1@EXAMPLE.COM", 100L)).andReturn(true).times(1);
+    expect(kerberosPrincipalHostDAO.exists("principal2/Host100@EXAMPLE.COM", 100L)).andReturn(false).times(1);
+
+    HostEntity host100 = createStrictMock(HostEntity.class);
+    expect(host100.getHostId()).andReturn(100L).times(1);
+
+    HostDAO hostDAO = createStrictMock(HostDAO.class);
+    expect(hostDAO.findByName("Host100")).andReturn(host100).times(1);
+
+    Collection<KerberosIdentityDescriptor> identities = new ArrayList<KerberosIdentityDescriptor>();
+    identities.add(identity1);
+    identities.add(identity2);
+    identities.add(identity3);
+    identities.add(identity4);
+    identities.add(identity5);
+
+    Map<String, Collection<KerberosIdentityDescriptor>> activeIdentities = new HashMap<String, Collection<KerberosIdentityDescriptor>>();
+    activeIdentities.put("Host100", identities);
+
+    KerberosHelper kerberosHelper = createStrictMock(KerberosHelper.class);
+    expect(kerberosHelper.getActiveIdentities("Cluster100", "Host100", null, null, true))
+        .andReturn(activeIdentities)
+        .times(1);
+
+    // replay
+    replayAll();
+
+    ResourceProvider provider = new HostKerberosIdentityResourceProvider(managementController);
+
+    // Set injected values...
+    Field field;
+    field = HostKerberosIdentityResourceProvider.class.getDeclaredField("kerberosHelper");
+    field.setAccessible(true);
+    field.set(provider, kerberosHelper);
+
+    field = HostKerberosIdentityResourceProvider.class.getDeclaredField("kerberosPrincipalDAO");
+    field.setAccessible(true);
+    field.set(provider, kerberosPrincipalDAO);
+
+    field = HostKerberosIdentityResourceProvider.class.getDeclaredField("kerberosPrincipalHostDAO");
+    field.setAccessible(true);
+    field.set(provider, kerberosPrincipalHostDAO);
+
+    field = HostKerberosIdentityResourceProvider.class.getDeclaredField("hostDAO");
+    field.setAccessible(true);
+    field.set(provider, hostDAO);
+
+    Set<String> propertyIds = new HashSet<String>();
+
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID);
+    propertyIds.add(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID);
+
+    Predicate predicate = new PredicateBuilder()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID)
+        .equals("Cluster100")
+        .and()
+        .property(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID)
+        .equals("Host100").toPredicate();
+
+    Request request = PropertyHelper.getReadRequest(propertyIds);
+    Set<Resource> resources = provider.getResources(request, predicate);
+
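+    // Only the identities with a principal descriptor (identity1, identity2, identity5) should yield resources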
+    Assert.assertEquals(3, resources.size());
+
+    for (Resource resource : resources) {
+      Assert.assertEquals("Cluster100",
+          resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_CLUSTER_NAME_PROPERTY_ID));
+      Assert.assertEquals("Host100",
+          resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_HOST_NAME_PROPERTY_ID));
+
+      String principal = (String) resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_NAME_PROPERTY_ID);
+
+      if ("principal1@EXAMPLE.COM".equals(principal)) {
+        Assert.assertEquals("identity1",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID));
+
+        Assert.assertEquals(KerberosPrincipalType.USER,
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID));
+
+        Assert.assertEquals("principal1",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID));
+
+        Assert.assertEquals("/etc/security/keytabs/principal1.headless.keytab",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID));
+
+        Assert.assertEquals("principal1",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID));
+
+        Assert.assertEquals("rw",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID));
+
+        Assert.assertEquals("principal1",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID));
+
+        Assert.assertEquals("r",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID));
+
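+        // owner access "rw" and group access "r" are expected to translate to file mode "640"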
+        Assert.assertEquals("640",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID));
+
+        Assert.assertEquals("true",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID));
+      } else if ("principal2/Host100@EXAMPLE.COM".equals(principal)) {
+        Assert.assertEquals("identity2",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID));
+
+        Assert.assertEquals(KerberosPrincipalType.SERVICE,
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID));
+
+        Assert.assertEquals("principal2",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID));
+
+        Assert.assertNull(resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID));
+        Assert.assertNull(resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID));
+        Assert.assertNull(resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID));
+        Assert.assertNull(resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID));
+        Assert.assertNull(resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID));
+        Assert.assertNull(resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID));
+
+        Assert.assertEquals("false",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID));
+      } else if ("principal5@EXAMPLE.COM".equals(principal)) {
+        Assert.assertEquals("identity5",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_DESCRIPTION_PROPERTY_ID));
+
+        Assert.assertEquals(KerberosPrincipalType.USER,
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_TYPE_PROPERTY_ID));
+
+        Assert.assertEquals("principal5",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_PRINCIPAL_LOCAL_USERNAME_PROPERTY_ID));
+
+        Assert.assertEquals("/etc/security/keytabs/principal5.headless.keytab",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_PATH_PROPERTY_ID));
+
+        Assert.assertEquals("principal5",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_PROPERTY_ID));
+
+        Assert.assertEquals("r",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_OWNER_ACCESS_PROPERTY_ID));
+
+        Assert.assertEquals("hadoop",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_PROPERTY_ID));
+
+        Assert.assertEquals("r",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_GROUP_ACCESS_PROPERTY_ID));
+
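+        // owner access "r" and group access "r" are expected to translate to file mode "440"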
+        Assert.assertEquals("440",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_MODE_PROPERTY_ID));
+
+        Assert.assertEquals("unknown",
+            resource.getPropertyValue(HostKerberosIdentityResourceProvider.KERBEROS_IDENTITY_KEYTAB_FILE_INSTALLED_PROPERTY_ID));
+      } else {
+        Assert.fail("Unexpected principal: " + principal);
+      }
+    }
+
+    // verify
+    verifyAll();
+  }
+}