Browse Source

AMBARI-19528 : loading coresite and hdfssite and custom configs in view before creating hdfs connection. changed the view.xml of views for adding custom properties. updated pom.xml of hive-next, files, pig, hive20, wfmanager, utils and commons. (nitirajrathore)

Nitiraj Rathore 8 years ago
Parent
Commit
40e60bde87
40 changed files with 879 additions and 286 deletions
  1. ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java (+8 -1)
  2. ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java (+30 -0)
  3. ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java (+6 -0)
  4. contrib/views/commons/pom.xml (+20 -0)
  5. contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java (+22 -14)
  6. contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java (+15 -6)
  7. contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java (+27 -16)
  8. contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java (+15 -5)
  9. contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/ViewPropertyHelper.java (+55 -0)
  10. contrib/views/files/pom.xml (+32 -12)
  11. contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java (+42 -35)
  12. contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java (+19 -10)
  13. contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java (+15 -1)
  14. contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java (+13 -4)
  15. contrib/views/files/src/main/resources/view.xml (+7 -0)
  16. contrib/views/hive-next/pom.xml (+48 -16)
  17. contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java (+9 -1)
  18. contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java (+19 -2)
  19. contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java (+13 -1)
  20. contrib/views/hive-next/src/main/resources/view.xml (+6 -0)
  21. contrib/views/hive20/pom.xml (+48 -16)
  22. contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java (+9 -1)
  23. contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java (+11 -1)
  24. contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java (+18 -2)
  25. contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java (+13 -1)
  26. contrib/views/hive20/src/main/resources/view.xml (+9 -0)
  27. contrib/views/jobs/src/main/resources/ui/.gitignore (+2 -1)
  28. contrib/views/pig/pom.xml (+27 -7)
  29. contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java (+20 -2)
  30. contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java (+15 -1)
  31. contrib/views/pig/src/main/resources/view.xml (+7 -0)
  32. contrib/views/pom.xml (+2 -1)
  33. contrib/views/utils/pom.xml (+70 -12)
  34. contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java (+87 -72)
  35. contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java (+16 -13)
  36. contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java (+44 -23)
  37. contrib/views/wfmanager/pom.xml (+20 -0)
  38. contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/FileServices.java (+15 -5)
  39. contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java (+17 -4)
  40. contrib/views/wfmanager/src/main/resources/view.xml (+8 -0)
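
Taken together, the diffs below wire a new optional view.xml parameter, view.conf.keyvalues (format: key1=value1;key2=value2), through a shared ViewPropertyHelper and into the HDFS connection. A minimal sketch of the resulting flow, assuming a view instance that has the property set; the wrapping class and method are illustrative only, while the names they call are taken from the diffs below:

    import java.util.Map;
    import com.google.common.base.Optional;
    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
    import org.apache.ambari.view.utils.hdfs.HdfsApi;
    import org.apache.ambari.view.utils.hdfs.HdfsApiException;
    import org.apache.ambari.view.utils.hdfs.HdfsUtil;

    // Hypothetical helper, not part of the commit; it only strings together the new pieces.
    public class CustomConfigConnectExample {
      public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";

      static HdfsApi connect(ViewContext context) throws HdfsApiException {
        // Parse the optional "key1=value1;key2=value2" instance property added to view.xml.
        Optional<Map<String, String>> props =
            ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
        // The new overload takes the custom properties; fall back to the old call when unset.
        return props.isPresent()
            ? HdfsUtil.connectToHDFSApi(context, props.get())
            : HdfsUtil.connectToHDFSApi(context);
      }
    }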

+ 8 - 1
ambari-server/src/main/java/org/apache/ambari/server/view/ClusterImpl.java

@@ -18,13 +18,14 @@
 
 package org.apache.ambari.server.view;
 
+import com.google.common.collect.ImmutableMap;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.view.ClusterType;
 import org.apache.ambari.view.cluster.Cluster;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 /**
  * View associated cluster implementation.
@@ -64,6 +65,12 @@ public class ClusterImpl implements Cluster {
     return config == null ? null : config.getProperties().get(key);
   }
 
+  @Override
+  public Map<String, String> getConfigByType(String type) {
+    Config configs = cluster.getDesiredConfigByType(type);
+    return ImmutableMap.copyOf(configs.getProperties());
+  }
+
   @Override
   public List<String> getHostsForServiceComponent(String serviceName, String componentName){
     List<ServiceComponentHost> serviceComponentHosts = cluster.getServiceComponentHosts(serviceName, componentName);

+ 30 - 0
ambari-server/src/main/java/org/apache/ambari/server/view/RemoteAmbariCluster.java

@@ -21,8 +21,10 @@ package org.apache.ambari.server.view;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
+import com.google.gson.reflect.TypeToken;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.entities.RemoteAmbariClusterEntity;
 import org.apache.ambari.view.AmbariHttpException;
@@ -35,8 +37,10 @@ import java.io.InputStream;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
@@ -157,6 +161,32 @@ public class RemoteAmbariCluster implements Cluster {
     return property.getAsJsonPrimitive().getAsString();
   }
 
+  @Override
+  public Map<String, String> getConfigByType(String type) {
+    JsonElement config = null;
+    try {
+      String desiredTag = getDesiredConfig(type);
+      if (desiredTag != null) {
+        config = configurationCache.get(String.format("%s/configurations?(type=%s&tag=%s)",this.clusterPath, type, desiredTag));
+      }
+    } catch (ExecutionException e) {
+      throw new RemoteAmbariConfigurationReadException("Can't retrieve configuration from Remote Ambari", e);
+    }
+    if (config == null || !config.isJsonObject()) return null;
+    JsonElement items = config.getAsJsonObject().get("items");
+
+    if (items == null || !items.isJsonArray()) return null;
+    JsonElement item = items.getAsJsonArray().get(0);
+
+    if (item == null || !item.isJsonObject()) return null;
+    JsonElement properties = item.getAsJsonObject().get("properties");
+
+    if (properties == null || !properties.isJsonObject()) return null;
+
+    Map<String, String> retMap = new Gson().fromJson(properties, new TypeToken<HashMap<String, String>>() {}.getType());
+    return retMap;
+  }
+
   @Override
   public List<String> getHostsForServiceComponent(String serviceName, String componentName) {
     String url = String.format("%s/services/%s/components/%s?" +
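
For orientation, the getConfigByType method added above assumes the remote Ambari configurations endpoint returns a payload whose items[0].properties object holds the key/value pairs, and it reduces that object to a plain Map. A standalone sketch of that parsing under the same assumption (the payload literal is illustrative, not taken from the commit):

    import java.util.HashMap;
    import java.util.Map;
    import com.google.gson.Gson;
    import com.google.gson.JsonElement;
    import com.google.gson.JsonParser;
    import com.google.gson.reflect.TypeToken;

    // Hypothetical example mirroring the parsing done in RemoteAmbariCluster.getConfigByType.
    public class RemoteConfigParseExample {
      public static void main(String[] args) {
        // Assumed shape of the response: items[0].properties carries the config key/values.
        String payload = "{\"items\":[{\"type\":\"core-site\",\"tag\":\"version1\","
            + "\"properties\":{\"fs.defaultFS\":\"hdfs://nn-host:8020\"}}]}";
        JsonElement config = new JsonParser().parse(payload);
        JsonElement properties = config.getAsJsonObject()
            .get("items").getAsJsonArray().get(0)
            .getAsJsonObject().get("properties");
        Map<String, String> retMap = new Gson().fromJson(
            properties, new TypeToken<HashMap<String, String>>() {}.getType());
        System.out.println(retMap); // {fs.defaultFS=hdfs://nn-host:8020}
      }
    }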

+ 6 - 0
ambari-views/src/main/java/org/apache/ambari/view/cluster/Cluster.java

@@ -19,6 +19,7 @@
 package org.apache.ambari.view.cluster;
 
 import java.util.List;
+import java.util.Map;
 
 /**
  * View associated cluster.  A cluster may be associated with a view instance so that the view instance may pull
@@ -42,6 +43,11 @@ public interface Cluster {
    */
   public String getConfigurationValue(String type, String key);
 
+  /**
+   * @param type : the type (site) for which the configurations are required.
+   * @return : return a map containing all the key values of configurations
+   */
+  public Map<String,String> getConfigByType(String type);
   /**
    * Get the hosts for service and componet
    *

+ 20 - 0
contrib/views/commons/pom.xml

@@ -53,6 +53,14 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -65,6 +73,18 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 

+ 22 - 14
contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/FileOperationService.java

@@ -35,6 +35,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.ListIterator;
+import java.util.Map;
 
 /**
  * File operations service
@@ -49,6 +50,14 @@ public class FileOperationService extends HdfsService {
     super(context);
   }
 
+  /**
+   * Constructor
+   * @param context View Context instance
+   */
+  public FileOperationService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
   /**
    * List dir
    * @param path path
@@ -60,8 +69,8 @@ public class FileOperationService extends HdfsService {
   public Response listdir(@QueryParam("path") String path) {
     try {
       JSONObject response = new JSONObject();
-      response.put("files", getApi(context).fileStatusToJSON(getApi(context).listdir(path)));
-      response.put("meta", getApi(context).fileStatusToJSON(getApi(context).getFileStatus(path)));
+      response.put("files", getApi().fileStatusToJSON(getApi().listdir(path)));
+      response.put("meta", getApi().fileStatusToJSON(getApi().getFileStatus(path)));
       return Response.ok(response).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -83,10 +92,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response rename(final SrcDstFileRequest request) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       if (api.rename(request.src, request.dst)) {
-        result = Response.ok(getApi(context).fileStatusToJSON(api
+        result = Response.ok(getApi().fileStatusToJSON(api
             .getFileStatus(request.dst)));
       } else {
         result = Response.ok(new FileOperationResult(false, "Can't move '" + request.src + "' to '" + request.dst + "'")).status(422);
@@ -110,10 +119,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response chmod(final ChmodRequest request) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       if (api.chmod(request.path, request.mode)) {
-        result = Response.ok(getApi(context).fileStatusToJSON(api
+        result = Response.ok(getApi().fileStatusToJSON(api
             .getFileStatus(request.path)));
       } else {
         result = Response.ok(new FileOperationResult(false, "Can't chmod '" + request.path + "'")).status(422);
@@ -138,7 +147,7 @@ public class FileOperationService extends HdfsService {
   public Response move(final MultiSrcDstFileRequest request,
                        @Context HttpHeaders headers, @Context UriInfo ui) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       String message = "";
 
@@ -192,7 +201,7 @@ public class FileOperationService extends HdfsService {
   public Response copy(final MultiSrcDstFileRequest request,
                        @Context HttpHeaders headers, @Context UriInfo ui) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       String message = "";
 
@@ -240,10 +249,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response mkdir(final MkdirRequest request) {
     try{
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       if (api.mkdir(request.path)) {
-        result = Response.ok(getApi(context).fileStatusToJSON(api.getFileStatus(request.path)));
+        result = Response.ok(getApi().fileStatusToJSON(api.getFileStatus(request.path)));
       } else {
         result = Response.ok(new FileOperationResult(false, "Can't create dir '" + request.path + "'")).status(422);
       }
@@ -264,7 +273,7 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response emptyTrash() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       api.emptyTrash();
       return Response.ok(new FileOperationResult(true)).build();
     } catch (WebApplicationException ex) {
@@ -286,7 +295,7 @@ public class FileOperationService extends HdfsService {
   public Response moveToTrash(MultiRemoveRequest request) {
     try {
       ResponseBuilder result;
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       String trash = api.getTrashDirPath();
       String message = "";
 
@@ -343,7 +352,7 @@ public class FileOperationService extends HdfsService {
   public Response remove(MultiRemoveRequest request, @Context HttpHeaders headers,
                          @Context UriInfo ui) {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       ResponseBuilder result;
       String message = "";
       if(request.paths.size() == 0) {
@@ -417,7 +426,6 @@ public class FileOperationService extends HdfsService {
     return srcPath.substring(srcPath.lastIndexOf('/') + 1);
   }
 
-
   /**
    * Wrapper for json mapping of mkdir request
    */

+ 15 - 6
contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/HdfsService.java

@@ -18,9 +18,6 @@
 
 package org.apache.ambari.view.commons.hdfs;
 
-import javax.ws.rs.WebApplicationException;
-import javax.xml.bind.annotation.XmlRootElement;
-
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
@@ -29,6 +26,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.ws.rs.WebApplicationException;
+import javax.xml.bind.annotation.XmlRootElement;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
@@ -42,6 +41,7 @@ public abstract class HdfsService {
   protected static final Logger logger = LoggerFactory.getLogger(HdfsService.class);
 
   protected final ViewContext context;
+  private Map<String, String> customProperties;
 
   /**
    * Constructor
@@ -51,6 +51,11 @@ public abstract class HdfsService {
     this.context = context;
   }
 
+  public HdfsService(ViewContext context, Map<String, String> customProperties) {
+    this.context = context;
+    this.customProperties = customProperties;
+  }
+
   /**
    * Wrapper for json mapping of result of Multi Remove Request
    */
@@ -84,14 +89,18 @@ public abstract class HdfsService {
 
   /**
    * Ger HdfsApi instance
-   * @param context View Context instance
    * @return HdfsApi business delegate
    */
-  public HdfsApi getApi(ViewContext context) {
+  public HdfsApi getApi() {
     if (_api == null) {
       try {
-        _api = HdfsUtil.connectToHDFSApi(context);
+        if(this.customProperties != null){
+          _api = HdfsUtil.connectToHDFSApi(context, customProperties);
+        }else{
+          _api = HdfsUtil.connectToHDFSApi(context);
+        }
       } catch (Exception ex) {
+        logger.error("Exception while connecting to hdfs : {}", ex.getMessage(), ex);
         throw new ServiceFormattedException("HdfsApi connection failed. Check \"webhdfs.url\" property", ex);
       }
     }

+ 27 - 16
contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UploadService.java

@@ -18,23 +18,25 @@
 
 package org.apache.ambari.view.commons.hdfs;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import javax.ws.rs.*;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
+import com.sun.jersey.core.header.FormDataContentDisposition;
+import com.sun.jersey.multipart.FormDataParam;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
-import org.apache.ambari.view.commons.hdfs.HdfsService;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.hadoop.fs.FSDataOutputStream;
 
-import com.sun.jersey.core.header.FormDataContentDisposition;
-import com.sun.jersey.multipart.FormDataParam;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
 
 /**
  * Upload service
@@ -49,13 +51,22 @@ public class UploadService extends HdfsService {
     super(context);
   }
 
+  /**
+   * takes context and any extra custom properties that needs to be included into config
+   * @param context
+   * @param customProperties
+   */
+  public UploadService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
   private void uploadFile(final String filePath, InputStream uploadedInputStream)
       throws IOException, InterruptedException {
     int read;
     byte[] chunk = new byte[1024];
     FSDataOutputStream out = null;
     try {
-      out = getApi(context).create(filePath, false);
+      out = getApi().create(filePath, false);
       while ((read = uploadedInputStream.read(chunk)) != -1) {
         out.write(chunk, 0, read);
       }
@@ -86,7 +97,7 @@ public class UploadService extends HdfsService {
       String filePath = path + contentDisposition.getFileName();
       uploadFile(filePath, uploadedInputStream);
       return Response.ok(
-          getApi(context).fileStatusToJSON(getApi(context).getFileStatus(filePath)))
+          getApi().fileStatusToJSON(getApi().getFileStatus(filePath)))
           .build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -117,7 +128,7 @@ public class UploadService extends HdfsService {
         path = path + "/";
       ZipInputStream zip = new ZipInputStream(uploadedInputStream);
       ZipEntry ze = zip.getNextEntry();
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       while (ze != null) {
         String filePath = path + ze.getName();
         if (ze.isDirectory()) {
@@ -127,7 +138,7 @@ public class UploadService extends HdfsService {
         }
         ze = zip.getNextEntry();
       }
-      return Response.ok(getApi(context).fileStatusToJSON(api.listdir(path))).build();
+      return Response.ok(getApi().fileStatusToJSON(api.listdir(path))).build();
     } catch (WebApplicationException ex) {
       throw ex;
     } catch (Exception ex) {

+ 15 - 5
contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/UserService.java

@@ -30,6 +30,7 @@ import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.FileNotFoundException;
+import java.util.Map;
 
 /**
  * User related info service
@@ -44,6 +45,15 @@ public class UserService extends HdfsService {
     super(context);
   }
 
+  /**
+   * takes context and any extra custom properties that needs to be included into config
+   * @param context
+   * @param customProperties
+   */
+  public UserService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
   /**
    * Returns home directory
    * @return home directory
@@ -53,9 +63,9 @@ public class UserService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response homeDir() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       return Response
-        .ok(getApi(context).fileStatusToJSON(api.getFileStatus(api.getHomeDir()
+        .ok(getApi().fileStatusToJSON(api.getFileStatus(api.getHomeDir()
           .toString()))).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -73,7 +83,7 @@ public class UserService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response trashEnabled() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       return Response.ok(new FileOperationResult(api.trashEnabled())).build();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -91,9 +101,9 @@ public class UserService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response trashdir() {
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       return Response.ok(
-        getApi(context).fileStatusToJSON(api.getFileStatus(api.getTrashDir()
+        getApi().fileStatusToJSON(api.getFileStatus(api.getTrashDir()
           .toString()))).build();
     } catch (WebApplicationException ex) {
       throw ex;

+ 55 - 0
contrib/views/commons/src/main/java/org/apache/ambari/view/commons/hdfs/ViewPropertyHelper.java

@@ -0,0 +1,55 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.ambari.view.commons.hdfs;
+
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.utils.hdfs.ConfigurationBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ViewPropertyHelper {
+  private static final Logger LOG = LoggerFactory.getLogger(ConfigurationBuilder.class);
+
+  public static Optional<Map<String, String>> getViewConfigs(ViewContext context, String viewConfigPropertyName) {
+    Map<String, String> viewConfigs = new HashMap<>();
+    String keyValues = context.getProperties().get(viewConfigPropertyName);
+    LOG.debug("{} : {}", viewConfigPropertyName, keyValues);
+    if (Strings.isNullOrEmpty(keyValues)) {
+      LOG.info("No values found in {} property.", viewConfigPropertyName);
+      return Optional.absent();
+    }
+
+    for (String entry : keyValues.split(";")) {
+      String[] kv = entry.split("=");
+      if (kv.length != 2) {
+        LOG.error("Ignoring entry {}, because it is not formatted like key=value");
+        continue;
+      }
+
+      viewConfigs.put(kv[0], kv[1]);
+    }
+
+    return Optional.of(viewConfigs);
+  }
+}
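
A small usage sketch for the helper above, matching the pattern the views below use: read the view.conf.keyvalues instance property, fall back to an empty map when it is unset, and pass the result to the new HdfsService constructors. The wrapping class and the example property value are illustrative, not from the commit:

    import java.util.HashMap;
    import java.util.Map;
    import com.google.common.base.Optional;
    import org.apache.ambari.view.ViewContext;
    import org.apache.ambari.view.commons.hdfs.FileOperationService;
    import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;

    // Hypothetical caller; FileBrowserService and the hive views follow the same pattern.
    public class ViewPropertyHelperUsageExample {
      static FileOperationService fileOps(ViewContext context) {
        // e.g. "dfs.nameservices=mycluster;bad-entry" parses to {dfs.nameservices=mycluster};
        // entries that are not exactly key=value are logged and skipped, and an unset
        // property yields Optional.absent().
        Optional<Map<String, String>> props =
            ViewPropertyHelper.getViewConfigs(context, "view.conf.keyvalues");
        Map<String, String> customProperties =
            props.isPresent() ? props.get() : new HashMap<String, String>();
        // The parsed map is handed to the new two-argument service constructors.
        return new FileOperationService(context, customProperties);
      }
    }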

+ 32 - 12
contrib/views/files/pom.xml

@@ -33,23 +33,43 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>

+ 42 - 35
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java

@@ -18,18 +18,21 @@
 
 package org.apache.ambari.view.filebrowser;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.FileNameMap;
-import java.net.URLConnection;
-import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.Queue;
-import java.util.UUID;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
+import com.google.gson.Gson;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.exceptions.MisconfigurationFormattedException;
+import org.apache.ambari.view.commons.exceptions.NotFoundFormattedException;
+import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
+import org.apache.ambari.view.commons.hdfs.HdfsService;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.ambari.view.utils.hdfs.HdfsUtil;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.security.AccessControlException;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.GET;
@@ -46,22 +49,18 @@ import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.StreamingOutput;
 import javax.ws.rs.core.UriInfo;
 import javax.xml.bind.annotation.XmlElement;
-
-import com.google.gson.Gson;
-import org.apache.ambari.view.commons.exceptions.MisconfigurationFormattedException;
-import org.apache.ambari.view.commons.exceptions.NotFoundFormattedException;
-import org.apache.ambari.view.commons.exceptions.ServiceFormattedException;
-import org.apache.ambari.view.commons.hdfs.HdfsService;
-import org.apache.ambari.view.utils.hdfs.HdfsApi;
-import org.apache.ambari.view.utils.hdfs.HdfsApiException;
-import org.apache.ambari.view.utils.hdfs.HdfsUtil;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.ambari.view.ViewContext;
-import org.apache.hadoop.security.AccessControlException;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.FileNameMap;
+import java.net.URLConnection;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Queue;
+import java.util.UUID;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 
 /**
  * Service for download and aggregate files
@@ -74,6 +73,14 @@ public class DownloadService extends HdfsService {
     super(context);
   }
 
+  /**
+   * @param context
+   * @param customProperties : extra properties that need to be included into config
+   */
+  public DownloadService(ViewContext context, Map<String, String> customProperties) {
+    super(context, customProperties);
+  }
+
   /**
    * Download entire file
    * @param path path to file
@@ -92,7 +99,7 @@ public class DownloadService extends HdfsService {
                          @Context HttpHeaders headers, @Context UriInfo ui) {
     LOG.debug("browsing path : {} with download : {}", path, download);
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       FileStatus status = api.getFileStatus(path);
       FSDataInputStream fs = api.open(path);
       if(checkperm) {
@@ -127,7 +134,7 @@ public class DownloadService extends HdfsService {
 
   private void zipFile(ZipOutputStream zip, String path) {
     try {
-      FSDataInputStream in = getApi(context).open(path);
+      FSDataInputStream in = getApi().open(path);
       zip.putNextEntry(new ZipEntry(path.substring(1)));
       byte[] chunk = new byte[1024];
 
@@ -185,7 +192,7 @@ public class DownloadService extends HdfsService {
             ServiceFormattedException {
           ZipOutputStream zip = new ZipOutputStream(output);
           try {
-            HdfsApi api = getApi(context);
+            HdfsApi api = getApi();
             Queue<String> files = new LinkedList<String>();
             for (String file : request.entries) {
               files.add(file);
@@ -249,7 +256,7 @@ public class DownloadService extends HdfsService {
           for (String path : request.entries) {
             try {
               try {
-                in = getApi(context).open(path);
+                in = getApi().open(path);
               } catch (AccessControlException ex) {
                 LOG.error("Error in opening file {}. Ignoring concat of this files.", path.substring(1), ex);
                 continue;
@@ -380,7 +387,7 @@ public class DownloadService extends HdfsService {
 
   private DownloadRequest getDownloadRequest(String requestId) throws HdfsApiException, IOException, InterruptedException {
     String fileName = getFileNameForRequestData(requestId);
-    String json = HdfsUtil.readFile(getApi(context), fileName);
+    String json = HdfsUtil.readFile(getApi(), fileName);
     DownloadRequest request = gson.fromJson(json, DownloadRequest.class);
 
     deleteFileFromHdfs(fileName);
@@ -399,7 +406,7 @@ public class DownloadService extends HdfsService {
   private void writeToHdfs(String uuid, String json) {
     String fileName = getFileNameForRequestData(uuid);
     try {
-      HdfsUtil.putStringToFile(getApi(context), fileName, json);
+      HdfsUtil.putStringToFile(getApi(), fileName, json);
     } catch (HdfsApiException e) {
       LOG.error("Failed to write request data to HDFS", e);
       throw new ServiceFormattedException("Failed to write request data to HDFS", e);
@@ -416,7 +423,7 @@ public class DownloadService extends HdfsService {
   }
 
   private void deleteFileFromHdfs(String fileName) throws IOException, InterruptedException {
-    getApi(context).delete(fileName, true);
+    getApi().delete(fileName, true);
   }
 
 

+ 19 - 10
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileBrowserService.java

@@ -18,19 +18,23 @@
 
 package org.apache.ambari.view.filebrowser;
 
-import javax.ws.rs.Path;
-
-import org.apache.ambari.view.ViewContext;
-
+import com.google.common.base.Optional;
 import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.hdfs.FileOperationService;
 import org.apache.ambari.view.commons.hdfs.UploadService;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
+
+import javax.ws.rs.Path;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Root files service
  */
 public class FileBrowserService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   @Inject
   ViewContext context;
@@ -41,7 +45,12 @@ public class FileBrowserService {
    */
   @Path("/download")
   public DownloadService download() {
-    return new DownloadService(context);
+    return new DownloadService(context, getViewConfigs());
+  }
+
+  private Map<String,String> getViewConfigs() {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
   }
 
   /**
@@ -50,7 +59,7 @@ public class FileBrowserService {
    */
   @Path("/upload")
   public UploadService upload() {
-    return new UploadService(context);
+    return new UploadService(context, getViewConfigs());
   }
 
   /**
@@ -59,7 +68,7 @@ public class FileBrowserService {
    */
   @Path("/fileops")
   public FileOperationService fileOps() {
-    return new FileOperationService(context);
+    return new FileOperationService(context, getViewConfigs());
   }
 
   /**
@@ -68,7 +77,7 @@ public class FileBrowserService {
    */
   @Path("/help")
   public HelpService help() {
-    return new HelpService(context);
+    return new HelpService(context, getViewConfigs());
   }
 
   /**
@@ -76,7 +85,7 @@ public class FileBrowserService {
    * @return service
    */
   @Path("/user")
-  public UserService userService() { return new UserService(context); }
+  public UserService userService() { return new UserService(context, getViewConfigs()); }
 
   /**
    * @see org.apache.ambari.view.filebrowser.FilePreviewService
@@ -84,7 +93,7 @@ public class FileBrowserService {
    */
   @Path("/preview")
   public FilePreviewService preview() {
-    return new FilePreviewService(context);
+    return new FilePreviewService(context, getViewConfigs());
   }
 
 }

+ 15 - 1
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FilePreviewService.java

@@ -37,6 +37,7 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.io.FileNotFoundException;
 import java.io.InputStream;
+import java.util.Map;
 
 /**
  * File Preview Service
@@ -49,6 +50,10 @@ public class FilePreviewService extends HdfsService {
   public FilePreviewService(ViewContext context) {
     super(context);
 
+    initCompressionCodecFactory();
+  }
+
+  private void initCompressionCodecFactory() {
     Configuration conf = new Configuration();
     conf.set("io.compression.codecs","org.apache.hadoop.io.compress.GzipCodec," +
       "org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec," +
@@ -57,13 +62,22 @@ public class FilePreviewService extends HdfsService {
     compressionCodecFactory = new CompressionCodecFactory(conf);
   }
 
+  /**
+   * @param context
+   * @param viewConfigs : extra properties that needs to be included into configs
+   */
+  public FilePreviewService(ViewContext context, Map<String, String> viewConfigs) {
+    super(context, viewConfigs);
+    initCompressionCodecFactory();
+  }
+
   @GET
   @Path("/file")
   @Produces(MediaType.APPLICATION_JSON)
   public Response previewFile(@QueryParam("path") String path, @QueryParam("start") int start, @QueryParam("end") int end) {
     LOG.info("previewing file {}, from start {}, till end {}", path, start, end);
     try {
-      HdfsApi api = getApi(context);
+      HdfsApi api = getApi();
       FileStatus status = api.getFileStatus(path);
 
       CompressionCodec codec = compressionCodecFactory.getCodec(status.getPath());

+ 13 - 4
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HelpService.java

@@ -18,15 +18,16 @@
 
 package org.apache.ambari.view.filebrowser;
 
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.HdfsService;
+import org.json.simple.JSONObject;
+
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.commons.hdfs.HdfsService;
-import org.json.simple.JSONObject;
+import java.util.Map;
 
 /**
  * Help service
@@ -41,6 +42,14 @@ public class HelpService extends HdfsService {
     super(context);
   }
 
+  /**
+   * @param context
+   * @param viewConfigs : extra properties that needs to be included into configs
+   */
+  public HelpService(ViewContext context, Map<String, String> viewConfigs) {
+    super(context, viewConfigs);
+  }
+
   /**
    * Version
    * @return version

+ 7 - 0
contrib/views/files/src/main/resources/view.xml

@@ -141,6 +141,13 @@
         <default-value>/user/${username}/files-view/tmp</default-value>
         <required>true</required>
     </parameter>
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>The key values that will be copied to hdfs connection configuration verbatim. Format : key1=value1;
+          key2=value2</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
 
     <resource>
         <name>files</name>

+ 48 - 16
contrib/views/hive-next/pom.xml

@@ -104,12 +104,20 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -124,6 +132,18 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -155,10 +175,22 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
         <exclusion>
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -175,6 +207,16 @@
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>0.9.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -213,16 +255,6 @@
       <artifactId>commons-io</artifactId>
       <version>2.4</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <version>4.5.2</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-      <version>4.4.3</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-csv</artifactId>

+ 9 - 1
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/internal/HdfsApiSupplier.java

@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive2.internal;
 
 import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
@@ -30,6 +31,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   protected final Logger LOG =
     LoggerFactory.getLogger(getClass());
@@ -44,7 +46,13 @@ public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
         synchronized (lock) {
           if(!hdfsApiMap.containsKey(getKey(context))) {
             LOG.debug("Creating HDFSApi instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
-            HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+            Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+            HdfsApi api;
+            if(props.isPresent()){
+              api = HdfsUtil.connectToHDFSApi(context, props.get());
+            }else{
+              api = HdfsUtil.connectToHDFSApi(context);
+            }
             hdfsApiMap.put(getKey(context), api);
             return Optional.of(api);
           }

+ 19 - 2
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/resources/files/FileService.java

@@ -18,10 +18,12 @@
 
 package org.apache.ambari.view.hive2.resources.files;
 
+import com.google.common.base.Optional;
 import com.jayway.jsonpath.JsonPath;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive2.BaseService;
 import org.apache.ambari.view.hive2.utils.*;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
@@ -46,6 +48,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
 import java.util.HashMap;
+import java.util.Map;
 
 /**
  * File access resource
@@ -60,6 +63,8 @@ import java.util.HashMap;
  *      update file content
  */
 public class FileService extends BaseService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   public static final String FAKE_FILE = "fakefile://";
   public static final String JSON_PATH_FILE = "jsonpath:";
 
@@ -226,7 +231,13 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+      HdfsApi api;
+      if(props.isPresent()){
+        api = HdfsUtil.connectToHDFSApi(context, props.get());
+      }else{
+        api = HdfsUtil.connectToHDFSApi(context);
+      }
       api.getStatus();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -241,7 +252,7 @@ public class FileService extends BaseService {
    */
   public static void userhomeSmokeTest(ViewContext context) {
     try {
-      UserService userservice = new UserService(context);
+      UserService userservice = new UserService(context, getViewConfigs(context));
       userservice.homeDir();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -263,4 +274,10 @@ public class FileService extends BaseService {
     }
     return filePath;
   }
+
+  private static Map<String,String> getViewConfigs(ViewContext context) {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
+  }
+
 }

+ 13 - 1
contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/utils/SharedObjectsFactory.java

@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive2.utils;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive2.persistence.IStorageFactory;
 import org.apache.ambari.view.hive2.persistence.Storage;
 import org.apache.ambari.view.hive2.persistence.utils.StorageFactory;
@@ -45,6 +47,8 @@ import java.util.concurrent.ConcurrentHashMap;
  * will use different connection.
  */
 public class SharedObjectsFactory implements IStorageFactory {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   protected final static Logger LOG =
       LoggerFactory.getLogger(SharedObjectsFactory.class);
 
@@ -123,7 +127,15 @@ public class SharedObjectsFactory implements IStorageFactory {
   public HdfsApi getHdfsApi() {
     if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
       try {
-        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+        Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+        HdfsApi api;
+        if(props.isPresent()){
+          api = HdfsUtil.connectToHDFSApi(context, props.get());
+        }else{
+          api = HdfsUtil.connectToHDFSApi(context);
+        }
+
+        localObjects.get(HdfsApi.class).put(getTagName(), api);
       } catch (HdfsApiException e) {
         String message = "F060 Couldn't open connection to HDFS";
         LOG.error(message);

+ 6 - 0
contrib/views/hive-next/src/main/resources/view.xml

@@ -216,6 +216,12 @@
         <required>true</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>The key values that will be copied to hdfs connection configuration verbatim.</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
 
     <parameter>
         <name>use.hive.interactive.mode</name>

+ 48 - 16
contrib/views/hive20/pom.xml

@@ -105,12 +105,20 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -125,6 +133,18 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -156,10 +176,22 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
         <exclusion>
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -176,6 +208,16 @@
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>0.9.0</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -214,16 +256,6 @@
       <artifactId>commons-io</artifactId>
       <version>2.4</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpclient</artifactId>
-      <version>4.5.2</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-      <version>4.4.3</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-csv</artifactId>

+ 9 - 1
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java

@@ -20,6 +20,7 @@ package org.apache.ambari.view.hive20.internal;
 
 import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
@@ -30,6 +31,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   protected final Logger LOG =
     LoggerFactory.getLogger(getClass());
@@ -44,7 +46,13 @@ public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
         synchronized (lock) {
           if(!hdfsApiMap.containsKey(getKey(context))) {
             LOG.debug("Creating HDFSApi instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
-            HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+            Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+            HdfsApi api;
+            if(props.isPresent()){
+              api = HdfsUtil.connectToHDFSApi(context, props.get());
+            }else{
+              api = HdfsUtil.connectToHDFSApi(context);
+            }
             hdfsApiMap.put(getKey(context), api);
             return Optional.of(api);
           }

+ 11 - 1
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/browser/FileService.java

@@ -18,18 +18,28 @@
 
 package org.apache.ambari.view.hive20.resources.browser;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.commons.hdfs.FileOperationService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive20.BaseService;
 
 import javax.ws.rs.Path;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  *
  */
 public class FileService extends BaseService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   @Path("/ops")
   public FileOperationService fileOps() {
-    return new FileOperationService(context);
+    return new FileOperationService(context, getViewConfigs());
+  }
+
+  private Map<String,String> getViewConfigs() {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
   }
 }

+ 18 - 2
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/files/FileService.java

@@ -18,10 +18,12 @@
 
 package org.apache.ambari.view.hive20.resources.files;
 
+import com.google.common.base.Optional;
 import com.jayway.jsonpath.JsonPath;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive20.BaseService;
 import org.apache.ambari.view.hive20.utils.*;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
@@ -46,6 +48,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
 import java.util.HashMap;
+import java.util.Map;
 
 /**
  * File access resource
@@ -62,6 +65,7 @@ import java.util.HashMap;
 public class FileService extends BaseService {
   public static final String FAKE_FILE = "fakefile://";
   public static final String JSON_PATH_FILE = "jsonpath:";
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   @Inject
   ViewResourceHandler handler;
@@ -226,7 +230,14 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+      HdfsApi api;
+      if(props.isPresent()){
+        api = HdfsUtil.connectToHDFSApi(context, props.get());
+      }else{
+        api = HdfsUtil.connectToHDFSApi(context);
+      }
+
       api.getStatus();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -241,7 +252,7 @@ public class FileService extends BaseService {
    */
   public static void userhomeSmokeTest(ViewContext context) {
     try {
-      UserService userservice = new UserService(context);
+      UserService userservice = new UserService(context, getViewConfigs(context));
       userservice.homeDir();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -263,4 +274,9 @@ public class FileService extends BaseService {
     }
     return filePath;
   }
+
+  private static Map<String,String> getViewConfigs(ViewContext context) {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
+  }
 }

+ 13 - 1
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/utils/SharedObjectsFactory.java

@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive20.utils;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.hive20.persistence.IStorageFactory;
 import org.apache.ambari.view.hive20.persistence.Storage;
 import org.apache.ambari.view.hive20.persistence.utils.StorageFactory;
@@ -45,6 +47,8 @@ import java.util.concurrent.ConcurrentHashMap;
  * will use different connection.
  */
 public class SharedObjectsFactory implements IStorageFactory {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   protected final static Logger LOG =
       LoggerFactory.getLogger(SharedObjectsFactory.class);
 
@@ -123,7 +127,15 @@ public class SharedObjectsFactory implements IStorageFactory {
   public HdfsApi getHdfsApi() {
     if (!localObjects.get(HdfsApi.class).containsKey(getTagName())) {
       try {
-        localObjects.get(HdfsApi.class).put(getTagName(), HdfsUtil.connectToHDFSApi(context));
+        Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+        HdfsApi api;
+        if(props.isPresent()){
+          api = HdfsUtil.connectToHDFSApi(context, props.get());
+        }else{
+          api = HdfsUtil.connectToHDFSApi(context);
+        }
+
+        localObjects.get(HdfsApi.class).put(getTagName(), api);
       } catch (HdfsApiException e) {
         String message = "F060 Couldn't open connection to HDFS";
         LOG.error(message);

+ 9 - 0
contrib/views/hive20/src/main/resources/view.xml

@@ -243,6 +243,15 @@
         <required>true</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>Key/value pairs that will be copied verbatim into the HDFS connection configuration. Format: key1=value1;
+          key2=value2</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
+
+
     <resource>
         <name>savedQuery</name>
         <plural-name>savedQueries</plural-name>
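
For illustration, the view.conf.keyvalues parameter added above is a semicolon-separated list of key=value pairs. Below is a minimal sketch of how such a string could be turned into a Map, assuming the helper simply splits on ';' and on the first '='; the actual ViewPropertyHelper introduced by this patch may differ. This sketch is not part of the commit.

import java.util.HashMap;
import java.util.Map;

/** Illustrative sketch only: parses "key1=value1;key2=value2" into a map. */
public class KeyValueParsingSketch {
  public static Map<String, String> parse(String raw) {
    Map<String, String> result = new HashMap<String, String>();
    if (raw == null || raw.trim().isEmpty()) {
      return result;                                   // no parameter value configured
    }
    for (String pair : raw.split(";")) {               // pairs are separated by ';'
      int idx = pair.indexOf('=');
      if (idx > 0) {                                   // keep only well-formed key=value entries
        result.put(pair.substring(0, idx).trim(), pair.substring(idx + 1).trim());
      }
    }
    return result;
  }
}

With that reading, a value such as hadoop.http.staticuser.user=hdfs;fs.permissions.umask-mode=022 (an example, not a value from this patch) would produce two entries, each copied verbatim onto the Hadoop Configuration.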

+ 2 - 1
contrib/views/jobs/src/main/resources/ui/.gitignore

@@ -5,4 +5,5 @@ dist
 .tmp
 app/bower_components
 test/bower_components
-.editorconfig
+.editorconfig
+node

+ 27 - 7
contrib/views/pig/pom.xml

@@ -87,22 +87,42 @@
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
         <exclusions>
-            <exclusion>
+          <exclusion>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+          </exclusion>
+          <exclusion>
                 <groupId>tomcat</groupId>
                 <artifactId>jasper-runtime</artifactId>
             </exclusion>
+            <exclusion>
+                <groupId>xerces</groupId>
+                <artifactId>xercesImpl</artifactId>
+            </exclusion>
         </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>javax.ws.rs</groupId>

+ 20 - 2
contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java

@@ -18,9 +18,11 @@
 
 package org.apache.ambari.view.pig.resources.files;
 
+import com.google.common.base.Optional;
 import com.google.inject.Inject;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.ViewResourceHandler;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.pig.services.BaseService;
 import org.apache.ambari.view.pig.utils.BadRequestFormattedException;
 import org.apache.ambari.view.pig.utils.FilePaginator;
@@ -53,8 +55,10 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.core.UriInfo;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 
 
 /**
@@ -70,6 +74,8 @@ import java.util.List;
  *      update file content
  */
 public class FileService extends BaseService {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   @Inject
   ViewResourceHandler handler;
 
@@ -213,7 +219,14 @@ public class FileService extends BaseService {
    */
   public static void hdfsSmokeTest(ViewContext context) {
     try {
-      HdfsApi api = HdfsUtil.connectToHDFSApi(context);
+      Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+      HdfsApi api;
+      if(props.isPresent()){
+        api = HdfsUtil.connectToHDFSApi(context, props.get());
+      }else{
+        api = HdfsUtil.connectToHDFSApi(context);
+      }
+
       api.getStatus();
     } catch (WebApplicationException ex) {
       LOG.error("Error occurred : ", ex);
@@ -231,7 +244,7 @@ public class FileService extends BaseService {
    */
   public static void userhomeSmokeTest(ViewContext context) {
     try {
-      UserService  userservice = new UserService(context);
+      UserService  userservice = new UserService(context, getViewConfigs(context));
       userservice.homeDir();
     } catch (WebApplicationException ex) {
       throw ex;
@@ -253,4 +266,9 @@ public class FileService extends BaseService {
     }
     return filePath;
   }
+
+  private static Map<String,String> getViewConfigs(ViewContext context) {
+    Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+    return props.isPresent()? props.get() : new HashMap<String, String>();
+  }
 }

+ 15 - 1
contrib/views/pig/src/main/java/org/apache/ambari/view/pig/utils/UserLocalObjects.java

@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.pig.utils;
 
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.pig.templeton.client.TempletonApi;
 import org.apache.ambari.view.pig.templeton.client.TempletonApiFactory;
 import org.apache.ambari.view.utils.UserLocal;
@@ -28,7 +30,11 @@ import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Map;
+
 public class UserLocalObjects {
+  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
   private final static Logger LOG =
       LoggerFactory.getLogger(UserLocalObjects.class);
 
@@ -55,7 +61,15 @@ public class UserLocalObjects {
       @Override
       protected synchronized HdfsApi initialValue(ViewContext context) {
         try {
-          return HdfsUtil.connectToHDFSApi(context);
+          Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+          HdfsApi api;
+          if(props.isPresent()){
+            api = HdfsUtil.connectToHDFSApi(context, props.get());
+          }else{
+            api = HdfsUtil.connectToHDFSApi(context);
+          }
+
+          return api;
         } catch (HdfsApiException e) {
           throw new ServiceFormattedException(e);
         }

+ 7 - 0
contrib/views/pig/src/main/resources/view.xml

@@ -194,6 +194,13 @@
         <required>false</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>Key/value pairs that will be copied verbatim into the HDFS connection configuration.</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
+
     <resource>
         <name>script</name>
         <plural-name>scripts</plural-name>

+ 2 - 1
contrib/views/pom.xml

@@ -31,7 +31,8 @@
   <properties>
     <ambari.version>2.5.0.0.0</ambari.version>
     <ambari.dir>${project.parent.parent.basedir}</ambari.dir>
-    <hadoop.version>2.7.1</hadoop.version>
+    <hadoop.version>2.7.3</hadoop.version>
+    <aws-java-sdk.version>1.10.6</aws-java-sdk.version>
     <views.jars.dir>views-jars</views.jars.dir>
     <views.jars.dir.rel>../target/${views.jars.dir}</views.jars.dir.rel>
   </properties>

+ 70 - 12
contrib/views/utils/pom.xml

@@ -29,27 +29,71 @@
   </parent>
 
   <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.amazonaws</groupId>
+          <artifactId>aws-java-sdk</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-annotations</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>${aws-java-sdk.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop.version}</version>
-        <exclusions>
-            <exclusion>
-                <groupId>tomcat</groupId>
-                <artifactId>jasper-runtime</artifactId>
-            </exclusion>
-        </exclusions>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -142,6 +186,20 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-azure</artifactId>
       <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>commons-validator</groupId>

+ 87 - 72
contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java

@@ -19,19 +19,13 @@
 package org.apache.ambari.view.utils.hdfs;
 
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.utils.ambari.AmbariApi;
-import org.apache.ambari.view.utils.ambari.NoClusterAssociatedException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.LocalFileSystem;
-import org.apache.hadoop.fs.azure.NativeAzureFileSystem;
-import org.apache.hadoop.fs.azure.Wasb;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.io.StringWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Map;
@@ -47,27 +41,27 @@ public class ConfigurationBuilder {
   public static final String HDFS_SITE = "hdfs-site";
 
   public static final String DEFAULT_FS_INSTANCE_PROPERTY = "webhdfs.url";
-  public static final String DEFAULT_FS_CLUSTER_PROPERTY  = "fs.defaultFS";
+  public static final String DEFAULT_FS_CLUSTER_PROPERTY = "fs.defaultFS";
 
   public static final String NAMESERVICES_INSTANCE_PROPERTY = "webhdfs.nameservices";
-  public static final String NAMESERVICES_CLUSTER_PROPERTY  = "dfs.nameservices";
+  public static final String NAMESERVICES_CLUSTER_PROPERTY = "dfs.nameservices";
   public static final String HA_NAMENODES_INSTANCE_PROPERTY = "webhdfs.ha.namenodes.list";
 
-  public static final String HA_NAMENODES_CLUSTER_PROPERTY  = "dfs.ha.namenodes.%s";
+  public static final String HA_NAMENODES_CLUSTER_PROPERTY = "dfs.ha.namenodes.%s";
   public static final String NAMENODE_RPC_NN1_INSTANCE_PROPERTY = "webhdfs.ha.namenode.rpc-address.nn1";
   public static final String NAMENODE_RPC_NN2_INSTANCE_PROPERTY = "webhdfs.ha.namenode.rpc-address.nn2";
-  public static final String NAMENODE_RPC_NN_CLUSTER_PROPERTY   = "dfs.namenode.rpc-address.%s.%s";
+  public static final String NAMENODE_RPC_NN_CLUSTER_PROPERTY = "dfs.namenode.rpc-address.%s.%s";
 
   public static final String NAMENODE_HTTP_NN1_INSTANCE_PROPERTY = "webhdfs.ha.namenode.http-address.nn1";
   public static final String NAMENODE_HTTP_NN2_INSTANCE_PROPERTY = "webhdfs.ha.namenode.http-address.nn2";
-  public static final String NAMENODE_HTTP_NN_CLUSTER_PROPERTY   = "dfs.namenode.http-address.%s.%s";
+  public static final String NAMENODE_HTTP_NN_CLUSTER_PROPERTY = "dfs.namenode.http-address.%s.%s";
 
   public static final String NAMENODE_HTTPS_NN1_INSTANCE_PROPERTY = "webhdfs.ha.namenode.https-address.nn1";
   public static final String NAMENODE_HTTPS_NN2_INSTANCE_PROPERTY = "webhdfs.ha.namenode.https-address.nn2";
-  public static final String NAMENODE_HTTPS_NN_CLUSTER_PROPERTY   = "dfs.namenode.https-address.%s.%s";
+  public static final String NAMENODE_HTTPS_NN_CLUSTER_PROPERTY = "dfs.namenode.https-address.%s.%s";
 
   public static final String FAILOVER_PROXY_PROVIDER_INSTANCE_PROPERTY = "webhdfs.client.failover.proxy.provider";
-  public static final String FAILOVER_PROXY_PROVIDER_CLUSTER_PROPERTY  = "dfs.client.failover.proxy.provider.%s";
+  public static final String FAILOVER_PROXY_PROVIDER_CLUSTER_PROPERTY = "dfs.client.failover.proxy.provider.%s";
 
   public static final String UMASK_CLUSTER_PROPERTY = "fs.permissions.umask-mode";
   public static final String UMASK_INSTANCE_PROPERTY = "hdfs.umask-mode";
@@ -85,6 +79,8 @@ public class ConfigurationBuilder {
   private AuthConfigurationBuilder authParamsBuilder;
   private Map<String, String> authParams;
   private URI defaultFsUri;
+  private Map<String, String> customProperties;
+
   /**
    * Constructor of ConfigurationBuilder based on ViewContext
    * @param context ViewContext
@@ -94,6 +90,17 @@ public class ConfigurationBuilder {
     this.authParamsBuilder = new AuthConfigurationBuilder(context);
   }
 
+  /**
+   * Takes the view context and any extra custom properties that need to be included in the config.
+   * @param context the ViewContext
+   * @param customProperties extra properties to be copied into the configuration verbatim
+   */
+  public ConfigurationBuilder(ViewContext context, Map<String, String> customProperties) {
+    this.context = context;
+    this.authParamsBuilder = new AuthConfigurationBuilder(context);
+    this.customProperties = customProperties;
+  }
+
   private void parseProperties() throws HdfsApiException {
     String defaultFS = getDefaultFS(context);
 
@@ -113,7 +120,7 @@ public class ConfigurationBuilder {
 
     } catch (URISyntaxException e) {
       throw new HdfsApiException("HDFS060 Invalid " + DEFAULT_FS_INSTANCE_PROPERTY +
-          "='" + defaultFS + "' URI", e);
+        "='" + defaultFS + "' URI", e);
     }
 
     conf.set("fs.defaultFS", defaultFS);
@@ -128,30 +135,30 @@ public class ConfigurationBuilder {
 
     defaultFS = addProtocolIfMissing(defaultFS);
 
-    if(context.getCluster() != null){
+    if (context.getCluster() != null) {
       try {
         URI fsUri = new URI(defaultFS);
         String protocol = fsUri.getScheme();
         String hostWithPort = defaultFS.substring(protocol.length() + 3);
 
-        Boolean webHdfsEnabled = Boolean.valueOf(getProperty(HDFS_SITE,DFS_WEBHDFS_ENABLED));
-        Boolean isHttps = DFS_HTTP_POLICY_HTTPS_ONLY.equals(getProperty(HDFS_SITE,DFS_HTTP_POLICY));
+        Boolean webHdfsEnabled = Boolean.valueOf(getProperty(HDFS_SITE, DFS_WEBHDFS_ENABLED));
+        Boolean isHttps = DFS_HTTP_POLICY_HTTPS_ONLY.equals(getProperty(HDFS_SITE, DFS_HTTP_POLICY));
 
         boolean isHA = isHAEnabled(defaultFS);
 
-        if(webHdfsEnabled && isHttps){
+        if (webHdfsEnabled && isHttps && "hdfs".equals(protocol)) {
           protocol = "swebhdfs";
-          String httpAddr = getProperty(HDFS_SITE,DFS_NAMENODE_HTTPS_ADDERSS);
-          if(!isHA && httpAddr != null) hostWithPort = httpAddr ;
-        }else if(webHdfsEnabled){
+          String httpAddr = getProperty(HDFS_SITE, DFS_NAMENODE_HTTPS_ADDERSS);
+          if (!isHA && httpAddr != null) hostWithPort = httpAddr;
+        } else if (webHdfsEnabled && "hdfs".equals(protocol)) {
           protocol = "webhdfs";
-          String httpsAddr = getProperty(HDFS_SITE,DFS_NAMENODE_HTTP_ADDERSS);
-          if(!isHA) hostWithPort = httpsAddr;
+          String httpsAddr = getProperty(HDFS_SITE, DFS_NAMENODE_HTTP_ADDERSS);
+          if (!isHA) hostWithPort = httpsAddr;
         }
 
-        return protocol + "://" +hostWithPort;
+        return protocol + "://" + hostWithPort;
       } catch (URISyntaxException e) {
-        throw new HdfsApiException("Invalid URI format."+e.getMessage(),e);
+        throw new HdfsApiException("Invalid URI format." + e.getMessage(), e);
       }
     }
     return defaultFS;
@@ -160,7 +167,7 @@ public class ConfigurationBuilder {
   private String getProperty(String type, String key, String instanceProperty) {
     String value;
 
-    if(context.getCluster() != null) {
+    if (context.getCluster() != null) {
       value = context.getCluster().getConfigurationValue(type, key);
     } else {
       value = context.getProperties().get(instanceProperty);
@@ -168,9 +175,9 @@ public class ConfigurationBuilder {
     return value;
   }
 
-  private String getProperty(String type,String key){
-    if(context.getCluster() != null){
-      return context.getCluster().getConfigurationValue(type,key);
+  private String getProperty(String type, String key) {
+    if (context.getCluster() != null) {
+      return context.getCluster().getConfigurationValue(type, key);
     }
     return null;
   }
@@ -178,7 +185,7 @@ public class ConfigurationBuilder {
   private void copyPropertyIfExists(String type, String key) {
     String value;
 
-    if(context.getCluster() != null) {
+    if (context.getCluster() != null) {
       value = context.getCluster().getConfigurationValue(type, key);
       if (value != null) {
         conf.set(key, value);
@@ -191,13 +198,35 @@ public class ConfigurationBuilder {
     }
   }
 
+  private void copyPropertiesBySite(String type) {
+    if (context.getCluster() != null) {
+      Map<String, String> configs = context.getCluster().getConfigByType(type);
+      LOG.debug("configs from core-site : {}", configs);
+      copyProperties(configs);
+    } else {
+      LOG.error("Cannot find cluster.");
+    }
+  }
+
+  private void copyProperties(Map<String, String> configs) {
+    if (null != configs) {
+      for(Map.Entry<String, String> entry : configs.entrySet()){
+        String key = entry.getKey();
+        String value = entry.getValue();
+        conf.set(key, value);
+      }
+    } else {
+      LOG.error("configs were null.");
+    }
+  }
+
   private void copyHAProperties(String defaultFS) throws URISyntaxException, HdfsApiException {
     URI uri = new URI(defaultFS);
     String nameservice = uri.getHost();
 
     copyClusterProperty(NAMESERVICES_CLUSTER_PROPERTY, NAMESERVICES_INSTANCE_PROPERTY);
     String namenodeIDs = copyClusterProperty(String.format(HA_NAMENODES_CLUSTER_PROPERTY, nameservice),
-                                             HA_NAMENODES_INSTANCE_PROPERTY);
+      HA_NAMENODES_INSTANCE_PROPERTY);
 
     String[] namenodes = namenodeIDs.split(",");
     if (namenodes.length != 2) {
@@ -205,22 +234,22 @@ public class ConfigurationBuilder {
     }
     //NN1
     copyClusterProperty(String.format(NAMENODE_RPC_NN_CLUSTER_PROPERTY, nameservice, namenodes[0]),
-                        NAMENODE_RPC_NN1_INSTANCE_PROPERTY);
+      NAMENODE_RPC_NN1_INSTANCE_PROPERTY);
     copyClusterProperty(String.format(NAMENODE_HTTP_NN_CLUSTER_PROPERTY, nameservice, namenodes[0]),
-                        NAMENODE_HTTP_NN1_INSTANCE_PROPERTY);
+      NAMENODE_HTTP_NN1_INSTANCE_PROPERTY);
     copyClusterProperty(String.format(NAMENODE_HTTPS_NN_CLUSTER_PROPERTY, nameservice, namenodes[0]),
       NAMENODE_HTTPS_NN1_INSTANCE_PROPERTY);
 
     //NN2
     copyClusterProperty(String.format(NAMENODE_RPC_NN_CLUSTER_PROPERTY, nameservice, namenodes[1]),
-                        NAMENODE_RPC_NN2_INSTANCE_PROPERTY);
+      NAMENODE_RPC_NN2_INSTANCE_PROPERTY);
     copyClusterProperty(String.format(NAMENODE_HTTP_NN_CLUSTER_PROPERTY, nameservice, namenodes[1]),
-                        NAMENODE_HTTP_NN2_INSTANCE_PROPERTY);
+      NAMENODE_HTTP_NN2_INSTANCE_PROPERTY);
     copyClusterProperty(String.format(NAMENODE_HTTPS_NN_CLUSTER_PROPERTY, nameservice, namenodes[1]),
       NAMENODE_HTTPS_NN2_INSTANCE_PROPERTY);
 
     copyClusterProperty(String.format(FAILOVER_PROXY_PROVIDER_CLUSTER_PROPERTY, nameservice),
-                        FAILOVER_PROXY_PROVIDER_INSTANCE_PROPERTY);
+      FAILOVER_PROXY_PROVIDER_INSTANCE_PROPERTY);
   }
 
   private String copyClusterProperty(String propertyName, String instancePropertyName) {
@@ -236,7 +265,7 @@ public class ConfigurationBuilder {
     URI uri = new URI(defaultFS);
     String nameservice = uri.getHost();
     String namenodeIDs = getProperty(HDFS_SITE, String.format(HA_NAMENODES_CLUSTER_PROPERTY, nameservice),
-                                     HA_NAMENODES_INSTANCE_PROPERTY);
+      HA_NAMENODES_INSTANCE_PROPERTY);
     return namenodeIDs != null;
   }
 
@@ -280,49 +309,35 @@ public class ConfigurationBuilder {
   public Configuration buildConfig() throws HdfsApiException {
     parseProperties();
     setAuthParams(buildAuthenticationConfig());
+    copyPropertiesBySite(CORE_SITE);
+    copyPropertiesBySite(HDFS_SITE);
 
     String umask = context.getProperties().get(UMASK_INSTANCE_PROPERTY);
-    if(umask != null && !umask.isEmpty()) conf.set(UMASK_CLUSTER_PROPERTY,umask);
+    if (umask != null && !umask.isEmpty()) conf.set(UMASK_CLUSTER_PROPERTY, umask);
 
-    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
-    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
-    conf.set("fs.file.impl", LocalFileSystem.class.getName());
-    conf.set("fs.swebhdfs.impl", SWebHdfsFileSystem.class.getName());
-    
-    configureWASB();
-    configureADL();
+    if(null != this.customProperties){
+      copyProperties(this.customProperties);
+    }
+
+    if(LOG.isDebugEnabled()){
+      LOG.debug("final conf : {}", printConf());
+    }
 
     return conf;
   }
 
-  /**
-   * Fill Azure Blob Storage properties if wasb:// scheme configured
-   */
-  public void configureWASB() {
-    LOG.debug("defaultFsUri.getScheme() == " + defaultFsUri.getScheme());
-    if (defaultFsUri.getScheme().equals("wasb")) {
-      conf.set("fs.AbstractFileSystem.wasb.impl", Wasb.class.getName());
-      conf.set("fs.wasb.impl", NativeAzureFileSystem.class.getName());
-
-      String account = defaultFsUri.getHost();
-      LOG.debug("WASB account == " + account);
-      copyPropertyIfExists(CORE_SITE, "fs.azure.account.key." + account);
-      copyPropertyIfExists(CORE_SITE, "fs.azure.account.keyprovider." + account);
-      copyPropertyIfExists(CORE_SITE, "fs.azure.shellkeyprovider.script");
+  private String printConf() {
+    try {
+      StringWriter stringWriter = new StringWriter();
+      conf.writeXml(stringWriter);
+      stringWriter.close();
+      return stringWriter.toString().replace("\n", "");
+    } catch (IOException e) {
+      LOG.error("error while converting conf to xml : ", e);
+      return "";
     }
   }
 
-  /**
-   *  Fill adl properties if adl:// scheme configured
-   */
-  public void configureADL() {
-    if (defaultFsUri.getScheme().equals("adl")) {
-      conf.set("fs.adl.impl", "com.microsoft.azure.datalake.store.AdlFileSystem");
-      copyPropertyIfExists(CORE_SITE,"dfs.webhdfs.oauth2.access.token.provider");
-      copyPropertyIfExists(CORE_SITE,"fs.azure.datalake.token.provider.service.urls");
-      copyPropertyIfExists(CORE_SITE,"fs.azure.datalake.token.provider.script");
-    }
-  }
 
   /**
    * Builds the authentication configuration
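
To summarize the buildConfig() change above: the configuration is now assembled in layers. The defaultFS and HA properties are parsed first, then the authentication parameters are set, then the full core-site and hdfs-site are copied from the cluster, and finally the custom view properties are applied, so they win on any conflict. A hedged usage sketch follows; ConfigurationBuilder and HdfsApiException are the classes touched by this patch, while the sketch class and the custom key shown are only illustrative.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.utils.hdfs.ConfigurationBuilder;
import org.apache.ambari.view.utils.hdfs.HdfsApiException;
import org.apache.hadoop.conf.Configuration;

public class ConfigurationBuilderSketch {
  public static Configuration buildForView(ViewContext context) throws HdfsApiException {
    Map<String, String> custom = new HashMap<String, String>();
    custom.put("fs.permissions.umask-mode", "022");    // illustrative override; any key is copied verbatim

    // Custom properties are applied after core-site/hdfs-site, so they override cluster values.
    ConfigurationBuilder builder = new ConfigurationBuilder(context, custom);
    return builder.buildConfig();
  }
}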

+ 16 - 13
contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java

@@ -19,26 +19,29 @@
 package org.apache.ambari.view.utils.hdfs;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.TrashPolicy;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.security.PrivilegedExceptionAction;
-import java.util.*;
-
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
-import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.json.simple.JSONArray;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.security.auth.Subject;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Hdfs Business Delegate

+ 44 - 23
contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java

@@ -27,7 +27,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.util.HashMap;
 import java.util.Map;
 
 public class HdfsUtil {
@@ -107,41 +106,63 @@ public class HdfsUtil {
     return newFilePath;
   }
 
+  /**
+   * Connects to HDFS, merging any custom properties that a view wants included in the config.
+   * @param context ViewContext that contains connection credentials
+   * @param customViewProperties extra properties copied verbatim into the configuration
+   * @return HdfsApi object
+   * @throws HdfsApiException
+   */
+  public static synchronized HdfsApi connectToHDFSApi(ViewContext context, Map<String, String> customViewProperties)
+    throws HdfsApiException {
+    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
+    Thread.currentThread().setContextClassLoader(HdfsUtil.class.getClassLoader());
+    try {
+      ConfigurationBuilder configurationBuilder = new ConfigurationBuilder(context, customViewProperties);
+      return getHdfsApi(context, configurationBuilder);
+    } finally {
+      Thread.currentThread().setContextClassLoader(currentClassLoader);
+    }
+  }
+
   /**
    * Factory of HdfsApi for specific ViewContext
    * @param context ViewContext that contains connection credentials
    * @return HdfsApi object
    */
   public static synchronized HdfsApi connectToHDFSApi(ViewContext context) throws HdfsApiException {
-    HdfsApi api = null;
     ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
-    Thread.currentThread().setContextClassLoader(null);
+    Thread.currentThread().setContextClassLoader(HdfsUtil.class.getClassLoader());
     try {
       ConfigurationBuilder configurationBuilder = new ConfigurationBuilder(context);
-      AuthConfigurationBuilder authConfigurationBuilder = new AuthConfigurationBuilder(context);
-
-      Map<String, String> authParams = authConfigurationBuilder.build();
-      configurationBuilder.setAuthParams(authParams);
-
-      try {
-        api = new HdfsApi(configurationBuilder, getHdfsUsername(context));
-        LOG.info("HdfsApi connected OK");
-      } catch (IOException e) {
-        String message = "HDFS040 Couldn't open connection to HDFS";
-        LOG.error(message);
-        throw new HdfsApiException(message, e);
-      } catch (InterruptedException e) {
-        String message = "HDFS041 Couldn't open connection to HDFS";
-        LOG.error(message);
-        throw new HdfsApiException(message, e);
-      }
-      return api;
-    }
-    finally {
+      return getHdfsApi(context, configurationBuilder);
+    } finally {
       Thread.currentThread().setContextClassLoader(currentClassLoader);
     }
   }
 
+  private static HdfsApi getHdfsApi(ViewContext context, ConfigurationBuilder configurationBuilder) throws HdfsApiException {
+    HdfsApi api = null;
+    AuthConfigurationBuilder authConfigurationBuilder = new AuthConfigurationBuilder(context);
+    Map<String, String> authParams = authConfigurationBuilder.build();
+    configurationBuilder.setAuthParams(authParams);
+    try {
+      api = new HdfsApi(configurationBuilder, getHdfsUsername(context));
+      LOG.info("HdfsApi connected OK");
+    } catch (IOException e) {
+      LOG.error("exception occurred while creating hdfsApi objcet : {}", e.getMessage(), e);
+      String message = "HDFS040 Couldn't open connection to HDFS";
+      LOG.error(message);
+      throw new HdfsApiException(message, e);
+    } catch (InterruptedException e) {
+      LOG.error("exception occurred while creating hdfsApi objcet : {}", e.getMessage(), e);
+      String message = "HDFS041 Couldn't open connection to HDFS";
+      LOG.error(message);
+      throw new HdfsApiException(message, e);
+    }
+    return api;
+  }
+
   /**
    * Returns username for HdfsApi from "webhdfs.username" property if set,
    * if not set then current Ambari username
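
The per-view wiring for this new overload follows the same pattern in every view touched by the patch; condensed into one place it looks roughly like the sketch below. Only the classes visible in this diff are used; the sketch class itself is illustrative, not code from the commit.

import java.util.Map;

import com.google.common.base.Optional;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
import org.apache.ambari.view.utils.hdfs.HdfsApi;
import org.apache.ambari.view.utils.hdfs.HdfsApiException;
import org.apache.ambari.view.utils.hdfs.HdfsUtil;

public class HdfsConnectionSketch {
  public static HdfsApi connect(ViewContext context) throws HdfsApiException {
    // "view.conf.keyvalues" is the instance parameter added to each view.xml in this patch.
    Optional<Map<String, String>> props =
        ViewPropertyHelper.getViewConfigs(context, "view.conf.keyvalues");
    return props.isPresent()
        ? HdfsUtil.connectToHDFSApi(context, props.get())   // merge the custom properties into the conf
        : HdfsUtil.connectToHDFSApi(context);               // otherwise keep the old behaviour
  }
}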

+ 20 - 0
contrib/views/wfmanager/pom.xml

@@ -88,6 +88,18 @@
 					<groupId>tomcat</groupId>
 					<artifactId>jasper-runtime</artifactId>
 				</exclusion>
+				<exclusion>
+					<groupId>org.apache.httpcomponents</groupId>
+					<artifactId>httpclient</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.apache.httpcomponents</groupId>
+					<artifactId>httpcore</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.fasterxml.jackson.core</groupId>
+					<artifactId>jackson-core</artifactId>
+				</exclusion>
 			</exclusions>
 		</dependency>
 		<dependency>
@@ -95,10 +107,18 @@
 			<artifactId>hadoop-hdfs</artifactId>
 			<version>${hadoop.version}</version>
 			<exclusions>
+				<exclusion>
+					<groupId>com.fasterxml.jackson.core</groupId>
+					<artifactId>jackson-core</artifactId>
+				</exclusion>
 				<exclusion>
 					<groupId>tomcat</groupId>
 					<artifactId>jasper-runtime</artifactId>
 				</exclusion>
+				<exclusion>
+					<groupId>xerces</groupId>
+					<artifactId>xercesImpl</artifactId>
+				</exclusion>
 			</exclusions>
 		</dependency>
 

+ 15 - 5
contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/FileServices.java

@@ -17,14 +17,19 @@
  */
 package org.apache.oozie.ambari.view;
 
-import javax.ws.rs.Path;
-
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.hdfs.FileOperationService;
 import org.apache.ambari.view.commons.hdfs.UploadService;
 import org.apache.ambari.view.commons.hdfs.UserService;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
+
+import javax.ws.rs.Path;
+import java.util.HashMap;
+import java.util.Map;
 
 public class FileServices {
+	public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
 	private ViewContext context;
 
@@ -38,7 +43,7 @@ public class FileServices {
 	 */
 	@Path("/upload")
 	public UploadService upload() {
-		return new UploadService(context);
+		return new UploadService(context, getViewConfigs());
 	}
 
 	/**
@@ -47,7 +52,7 @@ public class FileServices {
 	 */
 	@Path("/fileops")
 	public FileOperationService fileOps() {
-		return new FileOperationService(context);
+		return new FileOperationService(context, getViewConfigs());
 	}
 
 	/**
@@ -56,6 +61,11 @@ public class FileServices {
 	 */
 	@Path("/user")
 	public UserService userService() {
-		return new UserService(context);
+		return new UserService(context, getViewConfigs());
+	}
+
+	private Map<String,String> getViewConfigs() {
+		Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+		return props.isPresent()? props.get() : new HashMap<String, String>();
 	}
 }

+ 17 - 4
contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java

@@ -17,10 +17,9 @@
  */
 package org.apache.oozie.ambari.view;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
+import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -29,7 +28,13 @@ import org.apache.hadoop.fs.FileStatus;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Map;
+
 public class HDFSFileUtils {
+	public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+
 	private final static Logger LOGGER = LoggerFactory
 			.getLogger(HDFSFileUtils.class);
 	private ViewContext viewContext;
@@ -84,7 +89,15 @@ public class HDFSFileUtils {
 
 	private HdfsApi getHdfsgetApi() {
 		try {
-			return HdfsUtil.connectToHDFSApi(viewContext);
+			Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(viewContext, VIEW_CONF_KEYVALUES);
+			HdfsApi api;
+			if(props.isPresent()){
+				api = HdfsUtil.connectToHDFSApi(viewContext, props.get());
+			}else{
+				api = HdfsUtil.connectToHDFSApi(viewContext);
+			}
+
+			return api;
 		} catch (Exception ex) {
 			LOGGER.error("Error in getting HDFS Api", ex);
 			throw new RuntimeException(

+ 8 - 0
contrib/views/wfmanager/src/main/resources/view.xml

@@ -137,6 +137,14 @@
         <required>true</required>
     </parameter>
 
+    <parameter>
+        <name>view.conf.keyvalues</name>
+        <description>Key/value pairs that will be copied verbatim into the HDFS connection configuration. Format: key1=value1;
+          key2=value2</description>
+        <label>View Configs</label>
+        <required>false</required>
+    </parameter>
+
     <persistence>
         <!--
         <entity>