Sfoglia il codice sorgente

YARN-6365. Get static SLS html resources from classpath. Contributed by Yufei Gu.

Andrew Wang 8 anni fa
parent
commit
7e075a50e3

+ 16 - 0
hadoop-tools/hadoop-sls/pom.xml

@@ -76,6 +76,22 @@
  </dependencies>

  <build>
+    <resources>
+      <resource>
+      <directory>src/main/</directory>
+      <includes>
+        <include>html/simulate.html.template</include>
+        <include>html/simulate.info.html.template</include>
+        <include>html/track.html.template</include>
+        <include>html/css/bootstrap-responsive.min.css</include>
+        <include>html/css/bootstrap.min.css</include>
+        <include>html/js/thirdparty/bootstrap.min.js</include>
+        <include>html/js/thirdparty/d3.v3.js</include>
+        <include>html/js/thirdparty/d3-LICENSE</include>
+        <include>html/js/thirdparty/jquery.js</include>
+      </includes>
+      </resource>
+    </resources>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>

+ 2 - 8
hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh

@@ -71,8 +71,6 @@ function parse_args()
function calculate_classpath
{
  hadoop_add_to_classpath_tools hadoop-sls
-  hadoop_debug "Injecting ${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/sls/html into CLASSPATH"
-  hadoop_add_classpath "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_DIR}/sls/html"
}

function run_simulation() {
@@ -103,16 +101,12 @@ function run_simulation() {
  hadoop_java_exec sls org.apache.hadoop.yarn.sls.SLSRunner ${args}
}

-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
-
-# copy 'html' directory to current directory to make sure web sever can access
-cp -r "${bin}/../html" "$(pwd)"
-
# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
fi
 
 

+ 16 - 17
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/web/SLSWebApp.java

@@ -18,7 +18,6 @@

package org.apache.hadoop.yarn.sls.web;

-import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.text.MessageFormat;
@@ -31,7 +30,7 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

-import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
@@ -39,17 +38,17 @@ import org.apache.hadoop.yarn.sls.SLSRunner;
import org.apache.hadoop.yarn.sls.scheduler.FairSchedulerMetrics;
import org.apache.hadoop.yarn.sls.scheduler.SchedulerMetrics;
import org.apache.hadoop.yarn.sls.scheduler.SchedulerWrapper;
+
import org.eclipse.jetty.http.MimeTypes;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
-
+import org.eclipse.jetty.server.handler.AbstractHandler;
+import org.eclipse.jetty.server.handler.ResourceHandler;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
-import org.eclipse.jetty.server.handler.AbstractHandler;
-import org.eclipse.jetty.server.handler.ResourceHandler;

@Private
@Unstable
@@ -86,12 +85,12 @@ public class SLSWebApp extends HttpServlet {
    // load templates
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    try {
-      simulateInfoTemplate = FileUtils.readFileToString(new File(
-              cl.getResource("simulate.info.html.template").getFile()));
-      simulateTemplate = FileUtils.readFileToString(new File(
-              cl.getResource("simulate.html.template").getFile()));
-      trackTemplate = FileUtils.readFileToString(new File(
-              cl.getResource("track.html.template").getFile()));
+      simulateInfoTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/simulate.info.html.template"));
+      simulateTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/simulate.html.template"));
+      trackTemplate = IOUtils.toString(
+          cl.getResourceAsStream("html/track.html.template"));
    } catch (IOException e) {
      e.printStackTrace();
    }
@@ -107,20 +106,20 @@ public class SLSWebApp extends HttpServlet {

  public SLSWebApp(SchedulerWrapper wrapper, int metricsAddressPort) {
    this.wrapper = wrapper;
-    handleOperTimecostHistogramMap =
-            new HashMap<SchedulerEventType, Histogram>();
-    queueAllocatedMemoryCounterMap = new HashMap<String, Counter>();
-    queueAllocatedVCoresCounterMap = new HashMap<String, Counter>();
+    handleOperTimecostHistogramMap = new HashMap<>();
+    queueAllocatedMemoryCounterMap = new HashMap<>();
+    queueAllocatedVCoresCounterMap = new HashMap<>();
    schedulerMetrics = wrapper.getSchedulerMetrics();
    metrics = schedulerMetrics.getMetrics();
    port = metricsAddressPort;
  }

  public void start() throws Exception {
-    // static files
    final ResourceHandler staticHandler = new ResourceHandler();
    staticHandler.setMimeTypes(new MimeTypes());
-    staticHandler.setResourceBase("html");
+    String webRootDir = getClass().getClassLoader().getResource("html").
+        toExternalForm();
+    staticHandler.setResourceBase(webRootDir);

    Handler handler = new AbstractHandler() {
      @Override

+ 1 - 3
hadoop-tools/hadoop-sls/src/site/markdown/SchedulerLoadSimulator.md

@@ -97,7 +97,7 @@ This section will show how to use the simulator. Here let `$HADOOP_ROOT` represe

*   `bin`: contains running scripts for the simulator.

-*   `html`: contains several html/css/js files we needed for real-time tracking.
+*   `html`: Users can also reproduce those real-time tracking charts in offline mode. Just upload the `realtimetrack.json` to `$HADOOP_ROOT/share/hadoop/tools/sls/html/showSimulationTrace.html`. For browser security problem, need to put files `realtimetrack.json` and `showSimulationTrace.html` in the same directory.

*   `sample-conf`: specifies the simulator configurations.
@@ -279,8 +279,6 @@ After the simulator finishes, all logs are saved in the output directory specifi

*   Folder `metrics`: logs generated by the Metrics.

-Users can also reproduce those real-time tracking charts in offline mode. Just upload the `realtimetrack.json` to `$HADOOP_ROOT/share/hadoop/tools/sls/html/showSimulationTrace.html`. For browser security problem, need to put files `realtimetrack.json` and `showSimulationTrace.html` in the same directory.
-
Appendix
--------