Browse code

HDFS-6278. Merge r1589613 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1589616 13f79535-47bb-0310-9956-ffa450edef68
Haohui Mai 11 years ago
Parent
Current commit
4ef2598d5d

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -57,6 +57,8 @@ Release 2.5.0 - UNRELEASED
 
     HDFS-6265. Prepare HDFS codebase for JUnit 4.11. (cnauroth)
 
+    HDFS-6278. Create HTML5-based UI for SNN. (wheat9)
+
   OPTIMIZATIONS
 
     HDFS-6214. Webhdfs has poor throughput for files >2GB (daryn)

+ 60 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

@@ -27,11 +27,9 @@ import java.net.URI;
 import java.net.URL;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
+import java.util.*;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
@@ -70,6 +68,7 @@ import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
+import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -80,6 +79,9 @@ import org.apache.hadoop.util.Time;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
+import org.apache.hadoop.util.VersionInfo;
+
+import javax.management.ObjectName;
 
 /**********************************************************
  * The Secondary NameNode is a helper to the primary NameNode.
@@ -95,7 +97,8 @@ import com.google.common.collect.ImmutableList;
  *
  **********************************************************/
 @InterfaceAudience.Private
-public class SecondaryNameNode implements Runnable {
+public class SecondaryNameNode implements Runnable,
+        SecondaryNameNodeInfoMXBean {
     
   static{
     HdfsConfiguration.init();
@@ -122,7 +125,7 @@ public class SecondaryNameNode implements Runnable {
   private FSNamesystem namesystem;
 
   private Thread checkpointThread;
-
+  private ObjectName nameNodeStatusBeanName;
 
   @Override
   public String toString() {
@@ -169,11 +172,6 @@
     this.namenode = namenode;
   }
 
-  @VisibleForTesting
-  List<URI> getCheckpointDirs() {
-    return ImmutableList.copyOf(checkpointDirs);
-  }
-  
   /**
    * Create a connection to the primary namenode.
    */
@@ -265,6 +263,9 @@
         DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
         DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
 
+    nameNodeStatusBeanName = MBeans.register("SecondaryNameNode",
+            "SecondaryNameNodeInfo", this);
+
     infoServer = builder.build();
 
     infoServer.setAttribute("secondary.name.node", this);
@@ -330,6 +331,10 @@
     } catch (Exception e) {
       LOG.warn("Exception shutting down SecondaryNameNode", e);
     }
+    if (nameNodeStatusBeanName != null) {
+      MBeans.unregister(nameNodeStatusBeanName);
+      nameNodeStatusBeanName = null;
+    }
     try {
       if (checkpointImage != null) {
         checkpointImage.close();
@@ -679,6 +684,50 @@
     checkpointThread.start();
   }
 
+  @Override // SecondaryNameNodeInfoMXBean
+  public String getHostAndPort() {
+    return NetUtils.getHostPortString(nameNodeAddr);
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public long getStartTime() {
+    return starttime;
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public long getLastCheckpointTime() {
+    return lastCheckpointTime;
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public String[] getCheckpointDirectories() {
+    ArrayList<String> r = Lists.newArrayListWithCapacity(checkpointDirs.size());
+    for (URI d : checkpointDirs) {
+      r.add(d.toString());
+    }
+    return r.toArray(new String[r.size()]);
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public String[] getCheckpointEditlogDirectories() {
+    ArrayList<String> r = Lists.newArrayListWithCapacity(checkpointEditsDirs.size());
+    for (URI d : checkpointEditsDirs) {
+      r.add(d.toString());
+    }
+    return r.toArray(new String[r.size()]);
+  }
+
+  @Override // VersionInfoMXBean
+  public String getCompileInfo() {
+    return VersionInfo.getDate() + " by " + VersionInfo.getUser() +
+            " from " + VersionInfo.getBranch();
+  }
+
+  @Override // VersionInfoMXBean
+  public String getSoftwareVersion() {
+    return VersionInfo.getVersion();
+  }
+
 
   /**
    * Container for parsed command-line options.
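The bean registered above can be read back from the platform MBeanServer of the same JVM; the rewritten TestSecondaryWebUi at the bottom of this commit does exactly this. A minimal standalone sketch (the class name is invented here, and it assumes it runs in a JVM where a SecondaryNameNode has been started):

import java.lang.management.ManagementFactory;

import javax.management.MBeanServer;
import javax.management.ObjectName;

public class SnnMXBeanProbe {
  public static void main(String[] args) throws Exception {
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    // Object name as registered by MBeans.register("SecondaryNameNode",
    // "SecondaryNameNodeInfo", this) in the patch above.
    ObjectName name = new ObjectName(
        "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo");
    // MXBean getters surface as attributes named after the getter minus "get".
    System.out.println(mbs.getAttribute(name, "HostAndPort"));
    System.out.println(mbs.getAttribute(name, "SoftwareVersion"));
    for (String dir : (String[]) mbs.getAttribute(name, "CheckpointDirectories")) {
      System.out.println(dir);
    }
  }
}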

+ 52 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNodeInfoMXBean.java

@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * JMX information of the secondary NameNode
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface SecondaryNameNodeInfoMXBean extends VersionInfoMXBean {
+  /**
+   * Gets the host and port, colon-separated.
+   */
+  public String getHostAndPort();
+
+  /**
+   * @return the timestamp of when the SNN starts
+   */
+  public long getStartTime();
+
+  /**
+   * @return the timestamp of the last checkpoint
+   */
+  public long getLastCheckpointTime();
+
+  /**
+   * @return the directories that store the checkpoint images
+   */
+  public String[] getCheckpointDirectories();
+  /**
+   * @return the directories that store the edit logs
+   */
+  public String[] getCheckpointEditlogDirectories();
+}
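Each getter on this MXBean surfaces as a JMX attribute named after the getter minus the "get" prefix (getCheckpointDirectories becomes CheckpointDirectories). Since the SNN's HTTP server exposes Hadoop's standard /jmx servlet, the bean is also reachable over HTTP, which is how snn.js below consumes it. A hedged sketch of the same query from plain Java; the class name and the default 2.x SNN port 50090 are assumptions, not part of this commit:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class SnnJmxHttpProbe {
  public static void main(String[] args) throws Exception {
    // Assumption: SNN web UI on the default 2.x address, localhost:50090.
    URL url = new URL("http://localhost:50090/jmx"
        + "?qry=Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line); // JSON body: {"beans":[{...attributes...}]}
      }
    }
  }
}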

+ 35 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/VersionInfoMXBean.java

@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface VersionInfoMXBean {
+  /**
+   * @return the compilation information which contains date, user and branch
+   */
+  public String getCompileInfo();
+
+  /**
+   * @return the software version
+   */
+  public String getSoftwareVersion();
+}

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html

@@ -351,7 +351,7 @@
 </script><script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js">
 </script><script type="text/javascript" src="/static/dust-full-2.0.0.min.js">
 </script><script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js">
-</script><script type="text/javascript" src="dfs-dust.js">
+</script><script type="text/javascript" src="/static/dfs-dust.js">
 </script><script type="text/javascript" src="dfshealth.js">
 </script>
 </body>

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html

@@ -122,7 +122,7 @@
     </script><script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js">
     </script><script type="text/javascript" src="/static/dust-full-2.0.0.min.js">
     </script><script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js">
-    </script><script type="text/javascript" src="dfs-dust.js">
+    </script><script type="text/javascript" src="/static/dfs-dust.js">
     </script><script type="text/javascript" src="explorer.js">
     </script>
     <hr />

+ 16 - 10
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/index.html

@@ -1,5 +1,3 @@
-<meta HTTP-EQUIV="REFRESH" content="0;url=status.jsp"/>
-<html>
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
@@ -16,14 +14,22 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
-<head><title>Hadoop Administration</title></head>
-
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <meta http-equiv="REFRESH" content="0;url=status.html" />
+  <title>Hadoop Administration</title>
+</head>
 <body>
+<script type="text/javascript">
+//<![CDATA[
+window.location.href='status.html';
+//]]>
+</script>
 <h1>Hadoop Administration</h1>
-
-<ul> 
-  <li><a href="status.jsp">Status</a></li> 
+<ul>
+  <li><a href="status.jsp">Status</a></li>
 </ul>
-
-</body> 
-</html>
+</body>
+</html>

+ 68 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/snn.js

@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+  "use strict";
+
+  var data = {};
+  var outstanding_requests = 2;
+
+  dust.loadSource(dust.compile($('#tmpl-snn').html(), 'snn'));
+
+  function show_error_msg(msg) {
+    $('#alert-panel-body').html(msg);
+    $('#alert-panel').show();
+  }
+
+  function finished_request() {
+    outstanding_requests--;
+    if (outstanding_requests == 0) {
+      if (data.snn !== undefined && data.conf !== undefined) {
+        var conf = data.conf;
+        data.snn.CheckpointPeriod = conf['dfs.namenode.checkpoint.period'];
+        data.snn.TxnCount = conf['dfs.namenode.checkpoint.txns'];
+        render();
+      } else {
+        show_error_msg('Failed to load the information.');
+      }
+    }
+  }
+
+  function load() {
+    $.getJSON('/jmx?qry=Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo', function(resp) {
+      data.snn = resp.beans[0];
+    }).always(finished_request);
+
+    $.ajax({'url': '/conf', 'dataType': 'xml'}).done(function(d) {
+      var $xml = $(d);
+      var confs = {};
+      $xml.find('property').each(function(idx,v) {
+        confs[$(v).find('name').text()] = $(v).find('value').text();
+      });
+      data.conf = confs;
+    }).always(finished_request);
+  }
+
+  function render() {
+    dust.render('snn', data, function(err, out) {
+      $('#tab-overview').html(out);
+      $('#tab-overview').addClass('active');
+    });
+  }
+
+  load();
+})();
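Note how the script merges two sources before rendering: bean attributes from /jmx, plus dfs.namenode.checkpoint.period and dfs.namenode.checkpoint.txns from the /conf servlet, which serves the daemon's live configuration as XML. A rough Java equivalent of that XML scrape, for illustration only (class name and port are assumptions):

import java.net.URL;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public class SnnConfProbe {
  public static void main(String[] args) throws Exception {
    // Assumption: SNN web UI on the default 2.x address, localhost:50090.
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
        .parse(new URL("http://localhost:50090/conf").openStream());
    NodeList props = doc.getElementsByTagName("property");
    for (int i = 0; i < props.getLength(); i++) {
      Element p = (Element) props.item(i);
      String name = p.getElementsByTagName("name").item(0).getTextContent();
      // The two keys snn.js copies into the template data.
      if ("dfs.namenode.checkpoint.period".equals(name)
          || "dfs.namenode.checkpoint.txns".equals(name)) {
        System.out.println(name + " = "
            + p.getElementsByTagName("value").item(0).getTextContent());
      }
    }
  }
}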

+ 96 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/status.html

@@ -0,0 +1,96 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+  <link rel="stylesheet" type="text/css"
+       href="/static/bootstrap-3.0.2/css/bootstrap.min.css" />
+  <link rel="stylesheet" type="text/css" href="/static/hadoop.css" />
+  <title>SecondaryNamenode information</title>
+</head>
+<body>
+
+<header class="navbar navbar-inverse bs-docs-nav" role="banner">
+<div class="container">
+  <div class="navbar-header">
+    <div class="navbar-brand">Hadoop</div>
+  </div>
+
+  <ul class="nav navbar-nav" id="ui-tabs">
+    <li><a>Overview</a></li>
+  </ul>
+</div>
+</header>
+
+<div class="container">
+
+<div id="alert-panel">
+  <div class="alert alert-danger">
+    <button type="button" class="close" onclick="$('#alert-panel').hide();">&times;</button>
+    <div class="alert-body" id="alert-panel-body"></div>
+  </div>
+</div>
+
+<div class="tab-content">
+  <div class="tab-pane" id="tab-overview"></div>
+</div>
+
+<div class="row">
+  <hr />
+  <div class="col-xs-2"><p>Hadoop, 2014.</p></div>
+</div>
+</div>
+
+<script type="text/x-dust-template" id="tmpl-snn">
+{#snn}
+<div class="page-header"><h1>Overview</h1></div>
+<table class="table table-bordered table-striped">
+  <tr><th>Version</th><td>{SoftwareVersion}</td></tr>
+  <tr><th>Compiled</th><td>{CompileInfo}</td></tr>
+  <tr><th>NameNode Address</th><td>{HostAndPort}</td></tr>
+  <tr><th>Started</th><td>{StartTime|date_tostring}</td></tr>
+  <tr><th>Last Checkpoint</th><td>{@if cond="{LastCheckpointTime} === 0"}Never{:else}{LastCheckpointTime|date_tostring}{/if}</td></tr>
+  <tr><th>Checkpoint Period</th><td>{CheckpointPeriod} seconds</td></tr>
+  <tr><th>Checkpoint Size</th><td>{TxnCount|fmt_bytes}</td></tr>
+</table>
+
+<div class="page-header"><h2><small>Checkpoint Image URI</small></h2></div>
+<ul>
+  {#CheckpointDirectories}
+  <li>{.}</li>
+  {/CheckpointDirectories}
+</ul>
+
+<div class="page-header"><h2><small>Checkpoint Editlog URI</small></h2></div>
+<ul>
+  {#CheckpointEditlogDirectories}
+  <li>{.}</li>
+  {/CheckpointEditlogDirectories}
+</ul>
+{/snn}
+</script>
+
+<script type="text/javascript" src="/static/jquery-1.10.2.min.js">
+</script><script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js">
+</script><script type="text/javascript" src="/static/dust-full-2.0.0.min.js">
+</script><script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js">
+</script><script type="text/javascript" src="/static/dfs-dust.js">
+</script><script type="text/javascript" src="snn.js">
+</script>
+</body>
+</html>

+ 0 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfs-dust.js → hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dfs-dust.js


+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java

@@ -2444,8 +2444,8 @@
   
   private static List<File> getCheckpointCurrentDirs(SecondaryNameNode secondary) {
     List<File> ret = Lists.newArrayList();
-    for (URI u : secondary.getCheckpointDirs()) {
-      File checkpointDir = new File(u.getPath());
+    for (String u : secondary.getCheckpointDirectories()) {
+      File checkpointDir = new File(URI.create(u).getPath());
       ret.add(new File(checkpointDir, "current"));
     }
     return ret;

+ 20 - 20
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java

@@ -17,20 +17,18 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import javax.management.*;
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+
 public class TestSecondaryWebUi {
   
   private static MiniDFSCluster cluster;
@@ -59,18 +57,20 @@ public class TestSecondaryWebUi {
   }
 
   @Test
-  public void testSecondaryWebUi() throws IOException {
-    String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
-        SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
-    assertTrue("Didn't find \"Last Checkpoint\"",
-        pageContents.contains("Last Checkpoint"));
-  }
-  
-  @Test
-  public void testSecondaryWebJmx() throws MalformedURLException, IOException {
-    String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
-        SecondaryNameNode.getHttpAddress(conf).getPort() + "/jmx"));
-    assertTrue(pageContents.contains(
-        "Hadoop:service=SecondaryNameNode,name=JvmMetrics"));
+  public void testSecondaryWebUi()
+          throws IOException, MalformedObjectNameException,
+                 AttributeNotFoundException, MBeanException,
+                 ReflectionException, InstanceNotFoundException {
+    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+    ObjectName mxbeanName = new ObjectName(
+            "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo");
+
+    String[] checkpointDir = (String[]) mbs.getAttribute(mxbeanName,
+            "CheckpointDirectories");
+    Assert.assertArrayEquals(checkpointDir, snn.getCheckpointDirectories());
+    String[] checkpointEditlogDir = (String[]) mbs.getAttribute(mxbeanName,
+            "CheckpointEditlogDirectories");
+    Assert.assertArrayEquals(checkpointEditlogDir,
+            snn.getCheckpointEditlogDirectories());
   }
 }