Browse Source

svn merge -c 1477769 from branch-1 for HDFS-4776. Backport SecondaryNameNode web ui.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.2@1477770 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 12 years ago
parent
commit
af0248fc56

+ 2 - 0
CHANGES.txt

@@ -71,6 +71,8 @@ Release 1.2.0 - 2013.04.16
     MAPREDUCE-4824. Provide a mechanism for jobs to indicate they should not
     be recovered on JobTracker restart. (tomwhite & acmurthy via acmurthy) 
 
+    HDFS-4776. Backport SecondaryNameNode web ui.  (szetszwo)
+
   IMPROVEMENTS
 
     HADOOP-9434. Backport HADOOP-9267: hadoop -h|-{0,2}help should print usage.

+ 7 - 0
build.xml

@@ -572,6 +572,13 @@
      webxml="${build.webapps}/datanode/WEB-INF/web.xml">
     </jsp-compile>
 
+    <jsp-compile
+     uriroot="${src.webapps}/secondary"
+     outputdir="${build.src}"
+     package="org.apache.hadoop.hdfs.server.namenode"
+     webxml="${build.webapps}/secondary/WEB-INF/web.xml">
+    </jsp-compile>
+
     <!-- Compile Java files (excluding JSPs) checking warnings -->
     <javac 
      encoding="${build.encoding}" 

+ 8 - 0
src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java

@@ -637,4 +637,12 @@ public class JspHelper {
         + VersionInfo.getDate() + " by " + VersionInfo.getUser()
         + "</td></tr>\n</table></div>";
   }
+
+  /** Return a table containing version information. */
+  public static String getVersionTable() {
+    return "<div id='dfstable'><table>"       
+        + "\n  <tr><td id='col1'>Version:</td><td>" + VersionInfo.getVersion() + ", " + VersionInfo.getRevision()
+        + "\n  <tr><td id='col1'>Compiled:</td><td>" + VersionInfo.getDate() + " by " + VersionInfo.getUser()
+        + "\n</table></div>";
+  }
 }

+ 21 - 6
src/hdfs/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

@@ -17,17 +17,19 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.protocol.FSConstants.BUFFER_SIZE;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.net.URI;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
@@ -35,11 +37,9 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
-import static org.apache.hadoop.hdfs.protocol.FSConstants.BUFFER_SIZE;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.io.MD5Hash;
@@ -76,6 +76,9 @@ public class SecondaryNameNode implements Runnable {
   public static final Log LOG = 
     LogFactory.getLog(SecondaryNameNode.class.getName());
 
+  private final long starttime = System.currentTimeMillis();
+  private volatile long lastCheckpointTime = 0;
+
   private String fsName;
   private CheckpointStorage checkpointImage;
 
@@ -91,8 +94,20 @@ public class SecondaryNameNode implements Runnable {
   private Collection<File> checkpointDirs;
   private Collection<File> checkpointEditsDirs;
   private long checkpointPeriod;	// in seconds
-  private long checkpointSize;    // size (in MB) of current Edit Log
-
+  private long checkpointSize;    // size (in bytes) of current Edit Log
+
+  /** {@inheritDoc} */
+  public String toString() {
+    return getClass().getSimpleName() + " Status" 
+      + "\nName Node Address    : " + nameNodeAddr   
+      + "\nStart Time           : " + new Date(starttime)
+      + "\nLast Checkpoint Time : " + (lastCheckpointTime == 0? "--": new Date(lastCheckpointTime))
+      + "\nCheckpoint Period    : " + checkpointPeriod + " seconds"
+      + "\nCheckpoint Size      : " + StringUtils.byteDesc(checkpointSize)
+                                    + " (= " + checkpointSize + " bytes)" 
+      + "\nCheckpoint Dirs      : " + checkpointDirs
+      + "\nCheckpoint Edits Dirs: " + checkpointEditsDirs;
+  }
   /**
    * Utility class to facilitate junit test error simulation.
    */
@@ -255,6 +270,7 @@ public class SecondaryNameNode implements Runnable {
         }
       };
 
+    infoServer.setAttribute("secondary.name.node", this);
     infoServer.setAttribute("name.system.image", checkpointImage);
     infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
     infoServer.addInternalServlet("getimage", "/getimage",
@@ -354,7 +370,6 @@ public class SecondaryNameNode implements Runnable {
     // pending edit log.
     //
     long period = 5 * 60;              // 5 minutes
-    long lastCheckpointTime = 0;
     if (checkpointPeriod < period) {
       period = checkpointPeriod;
     }

+ 13 - 0
src/webapps/secondary/index.html

@@ -0,0 +1,13 @@
<html>
<head>
<title>Hadoop Administration</title>
<!-- Immediately redirect to the live status page; the link below is a
     fallback for clients that ignore the meta refresh. -->
<meta HTTP-EQUIV="REFRESH" content="0;url=status.jsp"/>
</head>

<body>
<h1>Hadoop Administration</h1>

<ul>
  <li><a href="status.jsp">Status</a></li>
</ul>

</body>
</html>

+ 20 - 0
src/webapps/secondary/status.jsp

@@ -0,0 +1,20 @@
<%@ page
  contentType="text/html; charset=UTF-8"
  import="org.apache.hadoop.util.*"
%>

<html>
<head>
<link rel="stylesheet" type="text/css" href="/static/hadoop.css">
<title>Hadoop SecondaryNameNode</title>
</head>

<body>
<h1>SecondaryNameNode</h1>
<%= JspHelper.getVersionTable() %>
<hr />
<pre>
<%-- The SecondaryNameNode instance registers itself under this attribute
     at startup; toString() renders its status summary. --%>
<%= application.getAttribute("secondary.name.node").toString() %>
</pre>

<br />
<b><a href="/logs/">Logs</a></b>
<%-- NOTE(review): htmlFooter() is expected to emit the closing
     </body></html> tags -- confirm against ServletUtil. --%>
<%= ServletUtil.htmlFooter() %>