HDFS-8388. Time and Date format need to be in sync in NameNode UI page. Contributed by Surendra Singh Lilhore.

(cherry picked from commit 65ccf2b1252a5c83755fa24a93cf1d30ee59b2c3)
Akira Ajisaka, 10 years ago
commit 25efccba01
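
Before this change the JavaScript helpers used the browser-locale `new Date(...).toLocaleString()` while the `NNStarted` bean attribute came from Java's `Date.toString()`; the commit standardizes the pages on one moment.js pattern that echoes the Java layout (with a numeric offset in place of the zone name). A minimal sketch of that pattern, not part of the commit; the timestamp and printed output are illustrative:

    // Minimal sketch: the single moment.js pattern the change standardizes on,
    // applied to an arbitrary epoch-millisecond value.
    // In the web UI moment is a global loaded from /static/moment.min.js;
    // require() here is only for running the snippet under Node.
    var moment = require('moment');

    var startedMillis = 1431955200000;   // illustrative value, e.g. NNStartedTimeInMillis
    console.log(moment(startedMillis).format('ddd MMM DD HH:mm:ss ZZ YYYY'));
    // -> "Mon May 18 13:20:00 +0000 2015" when run in UTC; the offset follows the local zone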

+ 2 - 0
hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md

@@ -191,6 +191,8 @@ Each metrics record contains tags such as ProcessName, SessionId, and Hostname a
 | `GetImageAvgTime` | Average fsimage download time in milliseconds |
 | `PutImageNumOps` | Total number of fsimage uploads to SecondaryNameNode |
 | `PutImageAvgTime` | Average fsimage upload time in milliseconds |
+| `NNStarted`| NameNode start time |
+| `NNStartedTimeInMillis`| NameNode start time in milliseconds |
 
 FSNamesystem
 ------------
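
The two new rows document attributes that the NameNode also exposes over its `/jmx` servlet; the dfshealth page reads `NNStartedTimeInMillis` from there and formats it client-side. A rough sketch of fetching the values directly, assuming the default 2.x web UI port and the `NameNodeInfo` bean that dfshealth.js queries; adjust host and port for your cluster:

    // Rough sketch (not part of the commit): reading the new attributes
    // straight from the NameNode's /jmx servlet.
    var url = 'http://namenode-host:50070/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo';

    fetch(url)
      .then(function (resp) { return resp.json(); })
      .then(function (jmx) {
        var nn = jmx.beans[0];                                  // the servlet wraps results in a "beans" array
        console.log('NNStarted:             ' + nn.NNStarted);  // human-readable Date.toString() text
        console.log('NNStartedTimeInMillis: ' + nn.NNStartedTimeInMillis);  // raw epoch millis for the UI to format
      });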

+ 3 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -926,6 +926,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8950. NameNode refresh doesn't remove DataNodes that are no longer in
     the allowed list (Daniel Templeton)
 
+    HDFS-8388. Time and Date format need to be in sync in NameNode UI page.
+    (Surendra Singh Lilhore via aajisaka)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 5 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -6126,6 +6126,11 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
     return getStartTime().toString();
   }
 
+  @Override // NameNodeMXBean
+  public long getNNStartedTimeInMillis() {
+    return startTime;
+  }
+
   @Override  // NameNodeMXBean
   public String getCompileInfo() {
     return VersionInfo.getDate() + " by " + VersionInfo.getUser() +

+ 6 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeMXBean.java

@@ -238,6 +238,12 @@ public interface NameNodeMXBean {
    */
   public String getNNStarted();
 
+  /**
+   * Gets the NN start time in milliseconds.
+   * @return the NN start time in msec
+   */
+  long getNNStartedTimeInMillis();
+
   /**
    * Get the compilation information which contains date, user and branch
    *

+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html

@@ -130,9 +130,9 @@
   <tr><th>Namenode ID:</th><td>{NamenodeID}</td></tr>
 {/HAInfo}
 {#nn}
-  <tr><th>Started:</th><td>{NNStarted}</td></tr>
+  <tr><th>Started:</th><td>{NNStartedTimeInMillis|date_tostring}</td></tr>
   <tr><th>Version:</th><td>{Version}</td></tr>
-  <tr><th>Compiled:</th><td>{CompileInfo}</td></tr>
+  <tr><th>Compiled:</th><td>{CompileInfo|format_compile_info}</td></tr>
   <tr><th>Cluster ID:</th><td>{ClusterId}</td></tr>
   <tr><th>Block Pool ID:</th><td>{BlockPoolId}</td></tr>
 {/nn}
@@ -423,7 +423,6 @@ There are no reported volume failures.
 </script><script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js">
 </script><script type="text/javascript" src="/static/dataTables.bootstrap.js">
 </script><script type="text/javascript" src="/static/moment.min.js">
-</script><script type="text/javascript" src="/static/moment.min.js">
 </script><script type="text/javascript" src="/static/dust-full-2.0.0.min.js">
 </script><script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js">
 </script><script type="text/javascript" src="/static/dfs-dust.js">
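
The `{NNStartedTimeInMillis|date_tostring}` and `{CompileInfo|format_compile_info}` pipes invoke filters registered on `dust.filters` (added in dfs-dust.js further down). A standalone sketch of that mechanism, assuming the page context where the bundled dust-full build and moment are globals; the one-row template is illustrative, not the real dfshealth.html markup:

    // Standalone sketch of the Dust.js filter mechanism the template relies on
    // (dustjs-linkedin, as bundled in /static/dust-full-2.0.0.min.js).
    dust.filters.date_tostring = function (v) {
      return moment(Number(v)).format('ddd MMM DD HH:mm:ss ZZ YYYY');
    };

    // Illustrative template, not the real dfshealth.html markup.
    var compiled = dust.compile('<tr><th>Started:</th><td>{NNStartedTimeInMillis|date_tostring}</td></tr>', 'row');
    dust.loadSource(compiled);
    dust.render('row', { NNStartedTimeInMillis: 1431955200000 }, function (err, out) {
      console.log(out);   // the cell now carries the moment-formatted start time
    });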

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.js

@@ -57,7 +57,7 @@
 
       'helper_date_tostring' : function (chunk, ctx, bodies, params) {
         var value = dust.helpers.tap(params.value, chunk, ctx);
-        return chunk.write('' + new Date(Number(value)).toLocaleString());
+        return chunk.write('' + moment(Number(value)).format('ddd MMM DD HH:mm:ss ZZ YYYY'));
       }
     };
 
@@ -175,7 +175,7 @@
     var HELPERS = {
       'helper_relative_time' : function (chunk, ctx, bodies, params) {
         var value = dust.helpers.tap(params.value, chunk, ctx);
-        return chunk.write(moment().subtract(Number(value), 'seconds').format('YYYY-MM-DD HH:mm:ss'));
+        return chunk.write(moment().subtract(Number(value), 'seconds').format('ddd MMM DD HH:mm:ss ZZ YYYY'));
       },
       'helper_usage_bar' : function (chunk, ctx, bodies, params) {
         var value = dust.helpers.tap(params.value, chunk, ctx);
@@ -262,7 +262,7 @@
     var HELPERS = {
       'helper_date_tostring' : function (chunk, ctx, bodies, params) {
         var value = dust.helpers.tap(params.value, chunk, ctx);
-        return chunk.write('' + new Date(Number(value)).toLocaleString());
+        return chunk.write('' + moment(Number(value)).format('ddd MMM DD HH:mm:ss ZZ YYYY'));
       }
     };
 

+ 1 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html

@@ -199,6 +199,7 @@
     </script><script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js">
     </script><script type="text/javascript" src="/static/dfs-dust.js">
     </script><script type="text/javascript" src="explorer.js">
+    </script><script type="text/javascript" src="/static/moment.min.js">
     </script>
   </body>
 </html>

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js

@@ -146,7 +146,7 @@
     var HELPERS = {
       'helper_date_tostring' : function (chunk, ctx, bodies, params) {
         var value = dust.helpers.tap(params.value, chunk, ctx);
-        return chunk.write('' + new Date(Number(value)).toLocaleString());
+        return chunk.write('' + moment(Number(value)).format('ddd MMM DD HH:mm:ss ZZ YYYY'));
       }
     };
     var url = '/webhdfs/v1' + encode_path(dir) + '?op=LISTSTATUS';

+ 7 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dfs-dust.js

@@ -58,9 +58,15 @@
     },
 
     'date_tostring' : function (v) {
-      return new Date(Number(v)).toLocaleString();
+      return moment(Number(v)).format('ddd MMM DD HH:mm:ss ZZ YYYY');
     },
 
+    'format_compile_info' : function (v) {
+      var info = v.split(" by ")
+      var date = moment(info[0]).format('ddd MMM DD HH:mm:ss ZZ YYYY');
+      return date.concat(" by ").concat(info[1]);
+     },
+
     'helper_to_permission': function (v) {
       var symbols = [ '---', '--x', '-w-', '-wx', 'r--', 'r-x', 'rw-', 'rwx' ];
       var vInt = parseInt(v, 8);
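
The new `format_compile_info` filter splits the MXBean's `CompileInfo` string (assembled in FSNamesystem as date + " by " + user + " from " + branch) on `" by "` and reformats only the leading date. A quick sketch of the transformation; the input string is made up for illustration, and moment is again required only for running outside the page:

    // Sketch of the transformation performed by the new filter.
    var moment = require('moment');   // a global in the web UI

    function format_compile_info(v) {
      var info = v.split(" by ");
      var date = moment(info[0]).format('ddd MMM DD HH:mm:ss ZZ YYYY');
      return date.concat(" by ").concat(info[1]);
    }

    console.log(format_compile_info('2015-05-19T02:10Z by jenkins from branch-2'));
    // -> something like "Tue May 19 02:10:00 +0000 2015 by jenkins from branch-2"
    //    (the printed offset follows the local time zone)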