Browse Source

Merging changes r1079069:r1080836 from trunk to federation

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/branches/HDFS-1052@1085160 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 14 years ago
parent
commit
39f7bb575d

+ 9 - 0
CHANGES.txt

@@ -288,6 +288,12 @@ Trunk (unreleased changes)
 
     HDFS-1731. Allow using a file to exclude certain tests from build (todd)
 
+    HDFS-1736. Remove the dependency from DatanodeJspHelper to FsShell.
+    (Daryn Sharp via szetszwo)
+    
+    HDFS-1731. Amend previous commit for this JIRA to fix build on Cygwin.
+    (todd)
+
   OPTIMIZATIONS
 
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image
@@ -343,6 +349,9 @@ Trunk (unreleased changes)
 
     HDFS-1748. Balancer utilization classification is incomplete.  (szetszwo)
 
+    HDFS-1738. change hdfs jmxget to return an empty string instead of 
+    null when an attribute value is not available (tanping via boryas)
+
 Release 0.22.0 - Unreleased
 
   NEW FEATURES

+ 2 - 1
build.xml

@@ -101,7 +101,7 @@
   <property name="test.hdfs.commit.tests.file" value="${test.src.dir}/commit-tests" />
   <property name="test.hdfs.smoke.tests.file" value="${test.src.dir}/smoke-tests" />
   <property name="test.hdfs.all.tests.file" value="${test.src.dir}/all-tests" />
-  <property name="test.exclude.file" value="/dev/null" />
+  <property name="test.exclude.file" value="${test.src.dir}/empty-file" />
 
   <property name="test.hdfs.rpc.engine" value=""/>
   <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
@@ -1220,6 +1220,7 @@
         <exclude name="src/test/commit-tests" />
         <exclude name="src/test/smoke-tests" />
         <exclude name="src/test/all-tests" />
+        <exclude name="src/test/empty-file" />
         <exclude name="**/*/*.properties" />
         <exclude name="src/c++/libhdfs/config.guess" />
         <exclude name="src/c++/libhdfs/config.sub" />

+ 5 - 2
src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java

@@ -23,6 +23,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URLEncoder;
 import java.security.PrivilegedExceptionAction;
+import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.List;
 
@@ -32,7 +33,6 @@ import javax.servlet.jsp.JspWriter;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -63,6 +63,9 @@ public class DatanodeJspHelper {
       });
   }
 
+  private static final SimpleDateFormat lsDateFormat =
+    new SimpleDateFormat("yyyy-MM-dd HH:mm");
+
   /**
    * Get the default chunk size.
    * @param conf the configuration
@@ -188,7 +191,7 @@ public class DatanodeJspHelper {
               + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
             cols[0] = "<a href=\"" + datanodeUrl + "\">"
               + localFileName + "</a>";
-            cols[5] = FsShell.dateForm.format(new Date((files[i]
+            cols[5] = lsDateFormat.format(new Date((files[i]
               .getModificationTime())));
             cols[6] = files[i].getPermission().toString();
             cols[7] = files[i].getOwner();

+ 2 - 2
src/java/org/apache/hadoop/hdfs/tools/JMXGet.java

@@ -103,7 +103,7 @@ public class JMXGet {
 
       for (MBeanAttributeInfo mb : mbinfos) {
         val = mbsc.getAttribute(oname, mb.getName());
-        System.out.format(format, mb.getName(), val.toString());
+        System.out.format(format, mb.getName(), (val==null)?"":val.toString());
       }
     }
   }
@@ -130,7 +130,7 @@ public class JMXGet {
       break;
     }
 
-    return (val == null) ? null : val.toString();
+    return (val == null) ? "" : val.toString();
   }
 
   /**

+ 0 - 0
src/test/empty-file