Переглянути джерело

AMBARI-20517. make home directory check as optional in hive20 view (Nitiraj Rathore via pallavkul)

pallavkul 8 роки тому
батько
коміт
77d231b44d

+ 25 - 0
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/Constants.java

@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20;
+
+public interface Constants {
+  String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
+  String DEFAULT_FS = "fs.defaultFS";
+  String AMBARI_SKIP_HOME_DIRECTORY_CHECK_PROTOCOL_LIST = "views.skip.home-directory-check.file-system.list";
+}

+ 2 - 2
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/internal/HdfsApiSupplier.java

@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive20.internal;
 import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
+import org.apache.ambari.view.hive20.Constants;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
@@ -31,7 +32,6 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
-  public static final String VIEW_CONF_KEYVALUES = "view.conf.keyvalues";
 
   protected final Logger LOG =
     LoggerFactory.getLogger(getClass());
@@ -46,7 +46,7 @@ public class HdfsApiSupplier implements ContextSupplier<Optional<HdfsApi>> {
         synchronized (lock) {
           if(!hdfsApiMap.containsKey(getKey(context))) {
             LOG.debug("Creating HDFSApi instance for Viewname: {}, Instance Name: {}", context.getViewName(), context.getInstanceName());
-            Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+            Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, Constants.VIEW_CONF_KEYVALUES);
             HdfsApi api;
             if(props.isPresent()){
               api = HdfsUtil.connectToHDFSApi(context, props.get());

+ 133 - 0
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/ServiceCheck.java

@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive20.resources.system;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
+import org.apache.ambari.view.hive20.Constants;
+import org.apache.ambari.view.utils.hdfs.ConfigurationBuilder;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
+import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.*;
+
+public class ServiceCheck {
+  protected static final Logger LOG = LoggerFactory.getLogger(ServiceCheck.class);
+
+  private final ViewContext viewContext;
+
+  ServiceCheck(ViewContext viewContext){
+    this.viewContext = viewContext;
+  }
+
+  public static class Policy {
+    private boolean checkHdfs = true;
+    private boolean checkHomeDirectory = true;
+    private boolean checkHive = true;
+    private boolean checkATS = true;
+
+    public Policy() {
+    }
+
+    public Policy(boolean checkHdfs, boolean checkHomeDirectory, boolean checkHive, boolean checkATS) {
+      this.checkHdfs = checkHdfs;
+      this.checkHomeDirectory = checkHomeDirectory;
+      this.checkHive = checkHive;
+      this.checkATS = checkATS;
+    }
+
+    public boolean isCheckHdfs() {
+      return checkHdfs;
+    }
+
+    public void setCheckHdfs(boolean checkHdfs) {
+      this.checkHdfs = checkHdfs;
+    }
+
+    public boolean isCheckHomeDirectory() {
+      return checkHomeDirectory;
+    }
+
+    public void setCheckHomeDirectory(boolean checkHomeDirectory) {
+      this.checkHomeDirectory = checkHomeDirectory;
+    }
+
+    public boolean isCheckHive() {
+      return checkHive;
+    }
+
+    public void setCheckHive(boolean checkHive) {
+      this.checkHive = checkHive;
+    }
+
+    public boolean isCheckATS() {
+      return checkATS;
+    }
+
+    public void setCheckATS(boolean checkATS) {
+      this.checkATS = checkATS;
+    }
+
+    @Override
+    public String toString() {
+      return "Policy{" +
+        "checkHdfs=" + checkHdfs +
+        ", checkHomeDirectory=" + checkHomeDirectory +
+        ", checkHive=" + checkHive +
+        ", checkATS=" + checkATS +
+        '}';
+    }
+  }
+
+  public Policy getServiceCheckPolicy() throws HdfsApiException {
+    Policy policy = new Policy();
+    Optional<Map<String, String>> viewConfigs = ViewPropertyHelper.getViewConfigs(viewContext, Constants.VIEW_CONF_KEYVALUES);
+    ConfigurationBuilder configBuilder;
+    if(viewConfigs.isPresent()) {
+      configBuilder = new ConfigurationBuilder(this.viewContext, viewConfigs.get());
+    }else{
+      configBuilder = new ConfigurationBuilder(this.viewContext);
+    }
+
+    Configuration configurations = configBuilder.buildConfig();
+    String defaultFS = configurations.get(Constants.DEFAULT_FS);
+
+    URI fsUri = null;
+    try {
+      fsUri = new URI(defaultFS);
+      String protocol = fsUri.getScheme();
+      String ambariSkipCheckValues = viewContext.getAmbariProperty(Constants.AMBARI_SKIP_HOME_DIRECTORY_CHECK_PROTOCOL_LIST);
+      List<String> protocolSkipList = (ambariSkipCheckValues == null? new LinkedList<String>() : Arrays.asList(ambariSkipCheckValues.split(",")));
+      if(null != protocol && protocolSkipList.contains(protocol)){
+        policy.setCheckHomeDirectory(false);
+        return policy;
+      }
+    } catch (URISyntaxException e) {
+      LOG.error("Error occurred while parsing the defaultFS URI.", e);
+      return policy;
+    }
+
+    return policy;
+  }
+}

+ 18 - 0
contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/system/SystemService.java

@@ -18,7 +18,9 @@
 
 package org.apache.ambari.view.hive20.resources.system;
 
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import javax.inject.Inject;
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
@@ -27,6 +29,8 @@ import javax.ws.rs.core.Response;
 
 import org.apache.ambari.view.hive20.BaseService;
 import org.apache.ambari.view.hive20.resources.system.ranger.RangerService;
+import org.apache.ambari.view.hive20.utils.ServiceFormattedException;
+import org.apache.ambari.view.utils.hdfs.HdfsApiException;
 import org.json.simple.JSONObject;
 
 /**
@@ -56,4 +60,18 @@ public class SystemService extends BaseService {
     return Response.ok(response).build();
   }
 
+  @GET
+  @Path("/service-check-policy")
+  public Response getServiceCheckList(){
+    ServiceCheck serviceCheck = new ServiceCheck(context);
+    try {
+      ServiceCheck.Policy policy = serviceCheck.getServiceCheckPolicy();
+      JSONObject policyJson = new JSONObject();
+      policyJson.put("serviceCheckPolicy", policy);
+      return Response.ok(policyJson).build();
+    } catch (HdfsApiException e) {
+      LOG.error("Error occurred while generating service check policy : ", e);
+      throw new ServiceFormattedException(e);
+    }
+  }
 }

+ 5 - 0
contrib/views/hive20/src/main/resources/ui/app/adapters/service-check.js

@@ -20,6 +20,11 @@ import ApplicationAdapter from './application';
 
 export default ApplicationAdapter.extend({
 
+  getServiceCheckPolicy(){
+    let url = this.buildURL() + '/system/service-check-policy';
+    return this.ajax(url, 'GET');
+  },
+
   doHdfsSeriveCheck() {
     let url = this.buildURL() + '/hive/hdfsStatus';
     return this.ajax(url, 'GET');

+ 1 - 1
contrib/views/hive20/src/main/resources/ui/app/configs/service-check-status.js

@@ -16,4 +16,4 @@
  * limitations under the License.
  */
 
-export default { notStarted: 'NOT_STARTED', started: 'STARTED', completed: 'COMPLETED', errored: 'ERRORED'};
+export default { notStarted: 'NOT_STARTED', started: 'STARTED', completed: 'COMPLETED', errored: 'ERRORED', skipped : 'SKIPPED'};

+ 0 - 17
contrib/views/hive20/src/main/resources/ui/app/controllers/service-check.js

@@ -45,22 +45,5 @@ export default Ember.Controller.extend({
 
   init() {
     this._super(...arguments);
-    this.get('serviceCheck').check().then((data) => {
-      if(data.userHomePromise.state === 'rejected') {
-        this.set('userHomeError', data.userHomePromise.reason.errors);
-      }
-
-      if(data.hdfsPromise.state === 'rejected') {
-        this.set('userHomeError', data.hdfsPromise.reason.errors);
-      }
-
-      if(data.atsPromise.state === 'rejected') {
-        this.set('atsError', data.atsError.reason.errors);
-      }
-
-      if(data.hivePromise.state === 'rejected') {
-        this.set('atsError', data.hiveError.reason.errors);
-      }
-    });
   }
 });

+ 33 - 0
contrib/views/hive20/src/main/resources/ui/app/routes/service-check.js

@@ -24,5 +24,38 @@ export default Ember.Route.extend({
     if (this.get('serviceCheck.checkCompleted')) {
       this.transitionTo('application');
     }
+  },
+
+  model(){
+    let promise =  this.get("serviceCheck").fetchServiceCheckPolicy();
+    promise.then((data) => {
+      console.log("data : ", data);
+      this.set("serviceCheckPolicy", data.serviceCheckPolicy)
+    });
+
+    return promise;
+  },
+
+  afterModel(){
+    let controller = this.controllerFor("service-check");
+    controller.set("serviceCheckPolicy", this.get("serviceCheckPolicy"));
+    this.get('serviceCheck').check(this.get("serviceCheckPolicy")).then((data) => {
+      if(data.userHomePromise.state === 'rejected') {
+        controller.set('userHomeError', data.userHomePromise.reason.errors);
+      }
+
+      if(data.hdfsPromise.state === 'rejected') {
+        controller.set('hdfsError', data.hdfsPromise.reason.errors);
+      }
+
+      if(data.atsPromise.state === 'rejected') {
+        controller.set('atsError', data.atsPromise.reason.errors);
+      }
+
+      if(data.hivePromise.state === 'rejected') {
+        controller.set('hiveError', data.hivePromise.reason.errors);
+      }
+    });
+
   }
 });

+ 62 - 10
contrib/views/hive20/src/main/resources/ui/app/services/service-check.js

@@ -25,21 +25,40 @@ export default Ember.Service.extend({
 
   transitionToApplication: false,
 
+  numberOfChecks: 4,
   hdfsCheckStatus: STATUS.notStarted,
   atsCheckStatus: STATUS.notStarted,
   userHomeCheckStatus: STATUS.notStarted,
   hiveCheckStatus: STATUS.notStarted,
   percentCompleted: Ember.computed('hdfsCheckStatus', 'atsCheckStatus', 'userHomeCheckStatus', 'hiveCheckStatus', function () {
+    // If every check was skipped, report the service check as 100% complete.
+    if(this.get("numberOfChecks") === 0){
+      return 100;
+    }
+
     let percent = 0;
-    percent += this.get('hdfsCheckStatus') === STATUS.completed ? 25 : 0;
-    percent += this.get('atsCheckStatus') === STATUS.completed ? 25 : 0;
-    percent += this.get('userHomeCheckStatus') === STATUS.completed ? 25 : 0;
-    percent += this.get('hiveCheckStatus') === STATUS.completed ? 25 : 0;
+    percent = this.getCompletedPercentage(percent, 'hdfsCheckStatus');
+    percent = this.getCompletedPercentage(percent, 'atsCheckStatus');
+    percent = this.getCompletedPercentage(percent, 'userHomeCheckStatus');
+    percent = this.getCompletedPercentage(percent, 'hiveCheckStatus');
+
     return percent;
   }),
 
+  getCompletedPercentage(currentPercent, checkStatus) {
+    if(this.get(checkStatus) === STATUS.skipped) {
+      return currentPercent;
+    }
+
+    return this.get(checkStatus) === STATUS.completed ? currentPercent + (100/this.get("numberOfChecks")) : currentPercent;
+  },
+
+  fetchServiceCheckPolicy(){
+    return this._getServiceCheckAdapter().getServiceCheckPolicy();
+  },
+
   checkCompleted: Ember.computed('percentCompleted', function () {
-    return this.get('percentCompleted') === 100;
+    return this.get('percentCompleted') <= 100 && this.get('percentCompleted') >= 95.5; // approximation: with an odd number of checks the per-check percentages (100/n) do not sum to exactly 100.
   }),
 
   transitioner: Ember.observer('checkCompleted', function() {
@@ -50,12 +69,45 @@ export default Ember.Service.extend({
     }
   }),
 
-  check() {
+  check(serviceCheckPolicy) {
+    let numberOfChecks = this.get("numberOfChecks");
+    let hdfsPromise = null;
+    if( serviceCheckPolicy.checkHdfs){
+      hdfsPromise = this._doHdfsCheck();
+    } else {
+      this.set("numberOfChecks", this.get("numberOfChecks") - 1);
+      this.set("hdfsCheckStatus", STATUS.skipped);
+    }
+
+    let atsPromise = null;
+    if( serviceCheckPolicy.checkATS){
+      atsPromise = this._doAtsCheck();
+    }else {
+      this.set("numberOfChecks", this.get("numberOfChecks") - 1);
+      this.set("atsCheckStatus", STATUS.skipped);
+    }
+
+    let userHomePromise = null;
+    if( serviceCheckPolicy.checkHomeDirectory){
+      userHomePromise = this._doUserHomeCheck();
+    }else {
+      this.set("numberOfChecks", this.get("numberOfChecks") - 1);
+      this.set("userHomeCheckStatus", STATUS.skipped);
+    }
+
+    let hivePromise = null;
+    if( serviceCheckPolicy.checkHive == true){
+      hivePromise = this._doHiveCheck();
+    }else{
+      this.set("numberOfChecks", this.get("numberOfChecks") - 1);
+      this.set("hiveCheckStatus", STATUS.skipped);
+    }
+
     let promises = {
-      hdfsPromise: this._doHdfsCheck(),
-      atsPromise: this._doAtsCheck(),
-      userHomePromise: this._doUserHomeCheck(),
-       hivePromise: this._doHiveCheck()
+      hdfsPromise: hdfsPromise,
+      atsPromise: atsPromise,
+      userHomePromise: userHomePromise,
+      hivePromise: hivePromise
     };
     return Ember.RSVP.hashSettled(promises);
   },

+ 11 - 1
contrib/views/hive20/src/main/resources/ui/app/templates/service-check.hbs

@@ -22,21 +22,31 @@
     </div>
   </div>
 
+  {{#if serviceCheckPolicy.checkHdfs}}
   {{service-check-entry label="HDFS Check"
                         status=serviceCheck.hdfsCheckStatus
                         error=hdfsError
   }}
+  {{/if}}
+
+  {{#if serviceCheckPolicy.checkHomeDirectory}}
   {{service-check-entry label="USER HOME Check"
                         status=serviceCheck.userHomeCheckStatus
                         error=userHomeError
   }}
+  {{/if}}
+
+  {{#if serviceCheckPolicy.checkATS}}
   {{service-check-entry label="ATS Check"
                         status=serviceCheck.atsCheckStatus
                         error=atsError
   }}
+  {{/if}}
+
+  {{#if serviceCheckPolicy.checkHive}}
   {{service-check-entry label="HIVE Check"
                         status=serviceCheck.hiveCheckStatus
                         error=hiveError
   }}
-
+  {{/if}}
 </div>