Browse source

AMBARI-11072. Files View: Support Auto Create and Cluster Association (alexantonenko)

Alex Antonenko 10 years ago
parent
commit
7ccaca9859

+ 0 - 1
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/DownloadService.java

@@ -53,7 +53,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.ambari.view.ViewContext;
 import org.apache.hadoop.security.AccessControlException;
 import org.json.simple.JSONObject;
-//import org.glassfish.jersey.server.ChunkedOutput;
 
 /**
  * Service for download and aggregate files

+ 5 - 2
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/FileOperationService.java

@@ -33,6 +33,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.filebrowser.utils.NotFoundFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.json.simple.JSONObject;
 
 /**
  * File operations service
@@ -57,8 +58,10 @@ public class FileOperationService extends HdfsService {
   @Produces(MediaType.APPLICATION_JSON)
   public Response listdir(@QueryParam("path") String path) {
     try {
-      return Response.ok(
-          getApi(context).fileStatusToJSON(getApi(context).listdir(path))).build();
+      JSONObject response = new JSONObject();
+      response.put("files", getApi(context).fileStatusToJSON(getApi(context).listdir(path)));
+      response.put("meta", getApi(context).fileStatusToJSON(getApi(context).getFileStatus(path)));
+      return Response.ok(response).build();
     } catch (WebApplicationException ex) {
       throw ex;
     } catch (FileNotFoundException ex) {

+ 5 - 0
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java

@@ -33,6 +33,8 @@ import java.util.Map;
 
 import org.apache.hadoop.security.UserGroupInformation;
 import org.json.simple.JSONArray;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.LinkedHashMap;
 
@@ -40,6 +42,8 @@ import java.util.LinkedHashMap;
  * Hdfs Business Delegate
  */
 public class HdfsApi {
+  protected static final Logger logger = LoggerFactory.getLogger(HdfsApi.class);
+
   private final Configuration conf = new Configuration();
   private final Map<String, String> params;
 
@@ -55,6 +59,7 @@ public class HdfsApi {
    */
   public HdfsApi(final String defaultFs, String username, Map<String, String> params) throws IOException,
       InterruptedException {
+    logger.info("Files View HdfsApi is connecting to '%s'", defaultFs);
     this.params = params;
     conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
     conf.set("fs.webhdfs.impl", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem");

+ 14 - 3
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java

@@ -21,7 +21,6 @@ package org.apache.ambari.view.filebrowser;
 import javax.xml.bind.annotation.XmlRootElement;
 
 import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.filebrowser.utils.MisconfigurationFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
@@ -70,8 +69,9 @@ public abstract class HdfsService {
     if (_api == null) {
 //      Thread.currentThread().setContextClassLoader(null);
       String defaultFs = context.getProperties().get("webhdfs.url");
-      if (defaultFs == null)
-        throw new MisconfigurationFormattedException("webhdfs.url");
+
+      defaultFs = normalizeFsUrl(defaultFs);
+
       try {
         _api = new HdfsApi(defaultFs, getDoAsUsername(context), getHdfsAuthParams(context));
       } catch (Exception ex) {
@@ -81,6 +81,17 @@ public abstract class HdfsService {
     return _api;
   }
 
+  protected static String normalizeFsUrl(String defaultFs) {
+    //TODO: Don't add port if HA is enabled
+    if (!defaultFs.matches("^[^:]+://.*$"))
+      defaultFs = "webhdfs://" + defaultFs;
+
+    if (!defaultFs.matches("^.*:\\d+$"))
+      defaultFs = defaultFs + ":50070";
+
+    return defaultFs;
+  }
+
   private static Map<String, String> getHdfsAuthParams(ViewContext context) {
     String auth = context.getProperties().get("webhdfs.auth");
     Map<String, String> params = new HashMap<String, String>();

+ 72 - 0
contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/PropertyValidator.java

@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import org.apache.ambari.view.ViewInstanceDefinition;
+import org.apache.ambari.view.validation.ValidationResult;
+import org.apache.ambari.view.validation.Validator;
+
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+public class PropertyValidator implements Validator {
+
+  public static final String WEBHDFS_URL = "webhdfs.url";
+
+  @Override
+  public ValidationResult validateInstance(ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    return null;
+  }
+
+  @Override
+  public ValidationResult validateProperty(String property, ViewInstanceDefinition viewInstanceDefinition, ValidationContext validationContext) {
+    if (property.equals(WEBHDFS_URL)) {
+      String webhdfsUrl = viewInstanceDefinition.getPropertyMap().get(WEBHDFS_URL);
+      try {
+        new URI(webhdfsUrl);
+      } catch (URISyntaxException e) {
+        return new InvalidPropertyValidationResult(false, "Must be valid URL");
+      }
+    }
+    return ValidationResult.SUCCESS;
+  }
+
+  public static class InvalidPropertyValidationResult implements ValidationResult {
+    private boolean valid;
+    private String detail;
+
+    public InvalidPropertyValidationResult(boolean valid, String detail) {
+      this.valid = valid;
+      this.detail = detail;
+    }
+
+    @Override
+    public boolean isValid() {
+      return valid;
+    }
+
+    @Override
+    public String getDetail() {
+      return detail;
+    }
+  }
+
+}

+ 1 - 1
contrib/views/files/src/main/resources/ui/app/components/uploader.js

@@ -115,4 +115,4 @@ App.FileUploaderComponent = Ember.Component.extend({
     readonly:true,
     classNames:['form-control']
   })
-});
+});

+ 16 - 1
contrib/views/files/src/main/resources/view.xml

@@ -21,17 +21,22 @@
 
     <min-ambari-version>1.7.*</min-ambari-version>
 
+    <validator-class>org.apache.ambari.view.filebrowser.PropertyValidator</validator-class>
+
     <parameter>
         <name>webhdfs.url</name>
         <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
         <label>WebHDFS FileSystem URI</label>
         <placeholder>webhdfs://namenode:50070</placeholder>
+        <default-value>webhdfs://localhost:50070</default-value>
         <required>true</required>
+        <cluster-config>hdfs-site/dfs.namenode.http-address</cluster-config>
     </parameter>
     <parameter>
         <name>webhdfs.username</name>
         <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
         <label>WebHDFS Username</label>
+        <default-value>${username}</default-value>
         <required>false</required>
     </parameter>
     <parameter>
@@ -39,6 +44,7 @@
         <description>Semicolon-separated authentication configs.</description>
         <placeholder>auth=SIMPLE</placeholder>
         <default-value>auth=SIMPLE</default-value>
+        <label>WebHDFS Authorization</label>
         <required>false</required>
     </parameter>
 
@@ -46,5 +52,14 @@
         <name>files</name>
         <service-class>org.apache.ambari.view.filebrowser.FileBrowserService</service-class>
     </resource>
-  
+
+    <auto-instance>
+        <name>AUTO_INSTANCE</name>
+        <label>Auto Create instance for the Files view</label>
+        <description>This view instance is auto created when the HDFS service is added to a cluster.</description>
+        <stack-id>HDP-2.*</stack-id>
+        <services>
+            <service>HDFS</service>
+        </services>
+    </auto-instance>
 </view>

+ 5 - 2
contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -108,7 +109,8 @@ public class FilebrowserTest{
     request.path = "/tmp1";
     fileBrowserService.fileOps().mkdir(request);
     Response response = fileBrowserService.fileOps().listdir("/");
-    JSONArray statuses = (JSONArray) response.getEntity();
+    JSONObject responseObject = (JSONObject) response.getEntity();
+    JSONArray statuses = (JSONArray) responseObject.get("files");
     System.out.println(response.getEntity());
     Assert.assertEquals(200, response.getStatus());
     Assert.assertTrue(statuses.size() > 0);
@@ -137,7 +139,8 @@ public class FilebrowserTest{
     Response response = uploadFile("/tmp/", "testUpload", ".tmp", "Hello world");
     Assert.assertEquals(200, response.getStatus());
     Response listdir = fileBrowserService.fileOps().listdir("/tmp");
-    JSONArray statuses = (JSONArray) listdir.getEntity();
+    JSONObject responseObject = (JSONObject) listdir.getEntity();
+    JSONArray statuses = (JSONArray) responseObject.get("files");
     System.out.println(statuses.size());
     Response response2 = fileBrowserService.download().browse("/tmp/testUpload.tmp", false, httpHeaders, uriInfo);
     Assert.assertEquals(200, response2.getStatus());

+ 49 - 0
contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/HdfsServiceTest.java

@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.filebrowser;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class HdfsServiceTest {
+  @Test
+  public void testNormalizeFsUrlWithoutProtocol() throws Exception {
+    String normalized = HdfsService.normalizeFsUrl("namenode.example.com:50070");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+
+  @Test
+  public void testNormalizeFsUrlWithoutPort() throws Exception {
+    String normalized = HdfsService.normalizeFsUrl("webhdfs://namenode.example.com");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+
+  @Test
+  public void testNormalizeFsUrlOnlyHostname() throws Exception {
+    String normalized = HdfsService.normalizeFsUrl("namenode.example.com");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+
+  @Test
+  public void testNormalizeFsUrlFixNoCorrectUrl() throws Exception {
+    String normalized = HdfsService.normalizeFsUrl("webhdfs://namenode.example.com:50070");
+    assertEquals(normalized, "webhdfs://namenode.example.com:50070");
+  }
+}