
HADOOP-9056. Build native library on Windows. Contributed by Chuan Liu and Arpit Agarwal.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-trunk-win@1420921 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas committed 12 years ago
parent commit 29cc744985
40 changed files with 2070 additions and 562 deletions
  1. + 3 - 0
      hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt
  2. + 47 - 1
      hadoop-common-project/hadoop-common/pom.xml
  3. + 24 - 0
      hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd
  4. + 1 - 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
  5. + 43 - 26
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
  6. + 108 - 52
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
  7. + 2 - 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
  8. + 17 - 21
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
  9. + 468 - 205
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
  10. + 25 - 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java
  11. + 1 - 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
  12. + 4 - 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
  13. + 32 - 0
      hadoop-common-project/hadoop-common/src/main/native/native.sln
  14. + 72 - 0
      hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
  15. + 54 - 0
      hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters
  16. + 11 - 3
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
  17. + 9 - 1
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
  18. + 139 - 48
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
  19. + 96 - 43
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
  20. + 14 - 5
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
  21. + 374 - 25
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
  22. + 43 - 2
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
  23. + 8 - 0
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
  24. + 39 - 15
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
  25. + 18 - 8
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
  26. + 3 - 0
      hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
  27. + 92 - 11
      hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
  28. + 2 - 0
      hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
  29. + 1 - 1
      hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
  30. + 5 - 1
      hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj
  31. + 39 - 39
      hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln
  32. + 4 - 0
      hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
  33. + 33 - 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
  34. + 33 - 3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
  35. + 167 - 29
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
  36. + 4 - 4
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
  37. + 6 - 5
      hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
  38. + 4 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedChunkedFile.java
  39. + 3 - 2
      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
  40. + 22 - 0
      hadoop-project/pom.xml

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.branch-trunk-win.txt

@@ -59,3 +59,6 @@ branch-trunk-win changes - unreleased
 
   HADOOP-9110. winutils ls off-by-one error indexing MONTHS array can cause 
   access violation. (Chris Nauroth via suresh)
+
+  HADOOP-9056. Build native library on Windows. (Chuan Liu, Arpit Agarwal via
+  suresh)

+ 47 - 1
hadoop-common-project/hadoop-common/pom.xml

@@ -587,6 +587,34 @@
              </execution>
            </executions>
          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>native-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>compile</phase>
+                <goals>
+                  <goal>javah</goal>
+                </goals>
+                <configuration>
+                  <javahPath>${env.JAVA_HOME}/bin/javah</javahPath>
+                  <javahClassNames>
+                    <javahClassName>org.apache.hadoop.io.compress.zlib.ZlibCompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.zlib.ZlibDecompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsMapping</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.nativeio.NativeIO</javahClassName>
+                    <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyCompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
+                  </javahClassNames>
+                  <javahOutputDirectory>${project.build.directory}/native/javah</javahOutputDirectory>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
          <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>exec-maven-plugin</artifactId>
@@ -601,7 +629,25 @@
                  <executable>msbuild</executable>
                  <arguments>
                    <argument>${basedir}/src/main/winutils/winutils.sln</argument>
-                    <argument>/p:Configuration=Release;OutDir=${project.build.directory}/bin/</argument>
+                    <argument>/nologo</argument>
+                    <argument>/p:Configuration=Release</argument>
+                    <argument>/p:OutDir=${project.build.directory}/bin/</argument>
+                  </arguments>
+                </configuration>
+              </execution>
+              <execution>
+                <id>compile-ms-native-dll</id>
+                <phase>compile</phase>
+                <goals>
+                  <goal>exec</goal>
+                </goals>
+                <configuration>
+                  <executable>msbuild</executable>
+                  <arguments>
+                    <argument>${basedir}/src/main/native/native.sln</argument>
+                    <argument>/nologo</argument>
+                    <argument>/p:Configuration=Release</argument>
+                    <argument>/p:OutDir=${project.build.directory}/bin/</argument>
                  </arguments>
                </configuration>
              </execution>

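Note: the native-maven-plugin execution added above runs javah at compile time to generate JNI headers for the listed classes into target/native/javah. As a rough sketch of what javah consumes (this class and package are hypothetical, not part of the patch), any class declaring native methods yields a header whose C function names encode the package and class:

    // Hypothetical javah input. ${JAVA_HOME}/bin/javah org.example.Crc32
    // would emit org_example_Crc32.h declaring:
    //   JNIEXPORT jint JNICALL Java_org_example_Crc32_update
    //       (JNIEnv *, jclass, jint, jbyteArray, jint, jint);
    package org.example;

    public class Crc32 {
      static {
        System.loadLibrary("example"); // native implementation lives here
      }
      public static native int update(int crc, byte[] buf, int off, int len);
    }
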
+ 24 - 0
hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.cmd

@@ -181,6 +181,30 @@ if not defined HADOOP_POLICYFILE (
   set HADOOP_POLICYFILE=hadoop-policy.xml
 )
 
+@rem
+@rem Determine the JAVA_PLATFORM
+@rem
+
+for /f "delims=" %%A in ('%JAVA% -Xmx32m %HADOOP_JAVA_PLATFORM_OPTS% -classpath "%CLASSPATH%" org.apache.hadoop.util.PlatformName') do set JAVA_PLATFORM=%%A
+@rem replace space with underscore
+set JAVA_PLATFORM=%JAVA_PLATFORM: =_%
+
+@rem
+@rem setup 'java.library.path' for native hadoop code if necessary
+@rem
+
+mkdir c:\tmp\dir1
+@rem Check if we're running hadoop directly from the build
+set JAVA_LIBRARY_PATH=
+if exist %HADOOP_CORE_HOME%\target\bin (
+  set JAVA_LIBRARY_PATH=%HADOOP_CORE_HOME%\target\bin
+)
+
+@rem For the distro case, check the bin folder
+if exist %HADOOP_CORE_HOME%\bin (
+  set JAVA_LIBRARY_PATH=%JAVA_LIBRARY_PATH%;%HADOOP_CORE_HOME%\bin
+)
+
 @rem
 @rem setup a default TOOL_PATH
 @rem

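Note: the JAVA_PLATFORM probe mirrors what hadoop-config.sh does on Unix. A simplified sketch of org.apache.hadoop.util.PlatformName (assuming the implementation of that era; see also the PlatformName.java change in this commit): it prints os.name-os.arch-data.model, e.g. "Windows 7-amd64-64", which the script rewrites to "Windows_7-amd64-64" because cmd handles embedded spaces poorly:

    // Simplified sketch, not the verbatim class.
    public class PlatformName {
      public static void main(String[] args) {
        System.out.println(System.getProperty("os.name") + "-"
            + System.getProperty("os.arch") + "-"
            + System.getProperty("sun.arch.data.model"));
      }
    }
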
+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -729,7 +729,7 @@ public class FileUtil {
                                         FsPermission permission
                                        )  throws IOException {
     if (NativeIO.isAvailable()) {
-      NativeIO.chmod(f.getCanonicalPath(), permission.toShort());
+      NativeIO.POSIX.chmod(f.getCanonicalPath(), permission.toShort());
     } else {
       execCommand(f, Shell.getSetPermissionCommand(
                   String.format("%04o", permission.toShort()), false));

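Note: chmod now lives under the new NativeIO.POSIX nested class (see the NativeIO.java diff below). A minimal usage sketch, assuming a POSIX-style octal mode:

    import java.io.File;
    import java.io.IOException;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class ChmodDemo {
      // Set rwxr-x--- when native IO is loaded; on Windows the patched
      // wrapper maps native error codes onto NativeIOException errnos.
      static void makePrivate(File f) throws IOException {
        if (NativeIO.isAvailable()) {
          NativeIO.POSIX.chmod(f.getCanonicalPath(), 0750);
        }
      }
    }
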
+ 43 - 26
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.fs;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.regex.Pattern;
 
 import org.apache.avro.reflect.Stringable;
 import org.apache.commons.lang.StringUtils;
@@ -46,6 +47,14 @@ public class Path implements Comparable {
   public static final boolean WINDOWS
     = System.getProperty("os.name").startsWith("Windows");
 
+  /**
+   *  Pre-compiled regular expressions to detect path formats.
+   */
+  private static final Pattern hasUriScheme =
+      Pattern.compile("^[a-zA-Z][a-zA-Z0-9+-.]+:");
+  private static final Pattern hasDriveLetterSpecifier =
+      Pattern.compile("^/?[a-zA-Z]:");
+
   private URI uri;                                // a hierarchical uri
 
   /** Resolve a child path against a parent path. */
@@ -81,7 +90,7 @@ public class Path implements Comparable {
                resolved.getPath(), resolved.getFragment());
   }
 
-  private void checkPathArg( String path ) {
+  private void checkPathArg( String path ) throws IllegalArgumentException {
     // disallow construction of a Path from an empty string
     if ( path == null ) {
       throw new IllegalArgumentException(
@@ -95,15 +104,16 @@ public class Path implements Comparable {
   
   /** Construct a path from a String.  Path strings are URIs, but with
    * unescaped elements and some additional normalization. */
-  public Path(String pathString) {
+  public Path(String pathString) throws IllegalArgumentException {
     checkPathArg( pathString );
     
     // We can't use 'new URI(String)' directly, since it assumes things are
     // escaped, which we don't require of Paths. 
     
     // add a slash in front of paths with Windows drive letters
-    if (hasWindowsDrive(pathString, false))
-      pathString = "/"+pathString;
+    if (hasWindowsDrive(pathString) && pathString.charAt(0) != '/') {
+      pathString = "/" + pathString;
+    }
 
     // parse uri components
     String scheme = null;
@@ -151,7 +161,7 @@ public class Path implements Comparable {
   private void initialize(String scheme, String authority, String path,
       String fragment) {
     try {
-      this.uri = new URI(scheme, authority, normalizePath(path), null, fragment)
+      this.uri = new URI(scheme, authority, normalizePath(scheme, path), null, fragment)
         .normalize();
     } catch (URISyntaxException e) {
       throw new IllegalArgumentException(e);
@@ -169,21 +179,36 @@ public class Path implements Comparable {
    */
   public static Path mergePaths(Path path1, Path path2) {
     String path2Str = path2.toUri().getPath();
-    if(path2.hasWindowsDrive(path2Str, path2Str.charAt(0)=='/')) {
+    if(hasWindowsDrive(path2Str)) {
       path2Str = path2Str.substring(path2Str.indexOf(':')+1);
     }
     return new Path(path1 + path2Str);
   }
 
-  private String normalizePath(String path) {
-    // remove double slashes & backslashes
+  /**
+   * Normalize a path string to use non-duplicated forward slashes as
+   * the path separator and remove any trailing path separators.
+   * @param scheme Supplies the URI scheme. Used to deduce whether we
+   *               should replace backslashes or not.
+   * @param path Supplies the scheme-specific part
+   * @return Normalized path string.
+   */
+  private static String normalizePath(String scheme, String path) {
+    // Remove double forward slashes.
     path = StringUtils.replace(path, "//", "/");
-    if (Path.WINDOWS) {
+
+    // Remove backslashes if this looks like a Windows path. Avoid
+    // the substitution if it looks like a non-local URI.
+    if (WINDOWS &&
+        (hasWindowsDrive(path) ||
+         (scheme == null) ||
+         (scheme == "") ||
+         (scheme == "file"))) {
       path = StringUtils.replace(path, "\\", "/");
     }
     
     // trim trailing slash from non-root path (ignoring windows drive)
-    int minLength = hasWindowsDrive(path, true) ? 4 : 1;
+    int minLength = hasWindowsDrive(path) ? 4 : 1;
     if (path.length() > minLength && path.endsWith("/")) {
       path = path.substring(0, path.length()-1);
     }
@@ -191,16 +216,8 @@ public class Path implements Comparable {
     return path;
   }
 
-  private static boolean hasWindowsDrive(String path, boolean slashed) {
-    if (!WINDOWS) return false;
-    int start = slashed ? 1 : 0;
-    return
-      path != null &&
-      path.length() >= start+2 &&
-      (slashed ? path.charAt(0) == '/' : true) &&
-      path.charAt(start+1) == ':' &&
-      ((path.charAt(start) >= 'A' && path.charAt(start) <= 'Z') ||
-       (path.charAt(start) >= 'a' && path.charAt(start) <= 'z'));
+  private static boolean hasWindowsDrive(String path) {
+    return (WINDOWS && hasDriveLetterSpecifier.matcher(path).find());
   }
 
   /**
@@ -217,7 +234,7 @@ public class Path implements Comparable {
     int start = (slashed ? 1 : 0);
 
     return
-        hasWindowsDrive(pathString, slashed) &&
+        hasWindowsDrive(pathString) &&
         pathString.length() >= (start + 3) &&
         ((pathString.charAt(start + 2) == SEPARATOR_CHAR) ||
           (pathString.charAt(start + 2) == '\\'));
@@ -244,7 +261,7 @@ public class Path implements Comparable {
    *  True if the path component (i.e. directory) of this URI is absolute.
    */
   public boolean isUriPathAbsolute() {
-    int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0;
+    int start = hasWindowsDrive(uri.getPath()) ? 3 : 0;
     return uri.getPath().startsWith(SEPARATOR, start);
    }
   
@@ -278,7 +295,7 @@ public class Path implements Comparable {
   public Path getParent() {
     String path = uri.getPath();
     int lastSlash = path.lastIndexOf('/');
-    int start = hasWindowsDrive(path, true) ? 3 : 0;
+    int start = hasWindowsDrive(path) ? 3 : 0;
     if ((path.length() == start) ||               // empty path
         (lastSlash == start && path.length() == start+1)) { // at root
       return null;
@@ -287,7 +304,7 @@ public class Path implements Comparable {
     if (lastSlash==-1) {
       parent = CUR_DIR;
     } else {
-      int end = hasWindowsDrive(path, true) ? 3 : 0;
+      int end = hasWindowsDrive(path) ? 3 : 0;
       parent = path.substring(0, lastSlash==end?end+1:lastSlash);
     }
     return new Path(uri.getScheme(), uri.getAuthority(), parent);
@@ -314,7 +331,7 @@ public class Path implements Comparable {
     if (uri.getPath() != null) {
       String path = uri.getPath();
       if (path.indexOf('/')==0 &&
-          hasWindowsDrive(path, true) &&          // has windows drive
+          hasWindowsDrive(path) &&                // has windows drive
           uri.getScheme() == null &&              // but no scheme
           uri.getAuthority() == null)             // or authority
         path = path.substring(1);                 // remove slash before drive
@@ -401,7 +418,7 @@ public class Path implements Comparable {
     URI newUri = null;
     try {
       newUri = new URI(scheme, authority , 
-        normalizePath(pathUri.getPath()), null, fragment);
+        normalizePath(scheme, pathUri.getPath()), null, fragment);
     } catch (URISyntaxException e) {
       throw new IllegalArgumentException(e);
     }

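Note: the two pre-compiled patterns replace the old character-by-character hasWindowsDrive check, so a drive specifier is recognized with or without the leading '/' that the constructor prepends. A standalone illustration of the drive-letter pattern (it mirrors hasDriveLetterSpecifier above rather than calling the patched class):

    import java.util.regex.Pattern;

    public class DriveLetterDemo {
      // Same pattern as Path.hasDriveLetterSpecifier.
      private static final Pattern DRIVE = Pattern.compile("^/?[a-zA-Z]:");

      public static void main(String[] args) {
        System.out.println(DRIVE.matcher("c:\\foo\\bar").find());  // true
        System.out.println(DRIVE.matcher("/c:/foo/bar").find());   // true
        System.out.println(DRIVE.matcher("hdfs://nn/foo").find()); // false
      }
    }
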
+ 108 - 52
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Arrays;
+import java.util.regex.Pattern;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -56,11 +57,17 @@ public class PathData implements Comparable<PathData> {
 
   /* True if the URI scheme was not present in the pathString but inferred.
    */
-  private boolean inferredSchemeFromPath;
+  private boolean inferredSchemeFromPath = false;
 
-  /* True if backslashes in a raw Windows path were replaced.
+  /**
+   *  Pre-compiled regular expressions to detect path formats.
    */
-  private boolean restoreBackslashes;
+  private static final Pattern potentialUri =
+      Pattern.compile("^[a-zA-Z][a-zA-Z0-9+-.]+:");
+  private static final Pattern windowsNonUriAbsolutePath1 =
+      Pattern.compile("^/?[a-zA-Z]:\\\\");
+  private static final Pattern windowsNonUriAbsolutePath2 =
+      Pattern.compile("^/?[a-zA-Z]:/");
 
   /**
    * Creates an object to wrap the given parameters as fields.  The string
@@ -97,6 +104,39 @@ public class PathData implements Comparable<PathData> {
     this(fs, pathString, lookupStat(fs, pathString, true));
   }
 
+  /**
+   * Validates the given Windows path.
+   * Throws IOException on failure.
+   * @param pathString a String of the path supplied by the user.
+   */
+  private void ValidateWindowsPath(String pathString)
+  throws IOException
+  {
+    if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
+      // Forward slashes disallowed in a backslash-separated path.
+      if (pathString.indexOf('/') != -1) {
+        throw new IOException("Invalid path string " + pathString);
+      }
+
+      inferredSchemeFromPath = true;
+      return;
+    }
+
+    // Is it a forward slash-separated absolute path?
+    if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
+      inferredSchemeFromPath = true;
+      return;
+    }
+
+    // Does it look like a URI? If so then just leave it alone.
+    if (potentialUri.matcher(pathString).find()) {
+      return;
+    }
+
+    // Looks like a relative path on Windows.
+    return;
+  }
+
   /**
    * Creates an object to wrap the given parameters as fields.  The string
    * used to create the path will be recorded since the Path object does not
@@ -112,13 +152,8 @@ public class PathData implements Comparable<PathData> {
     this.path = fs.makeQualified(new Path(uri));
     setStat(stat);
 
-    if (Path.isWindowsAbsolutePath(pathString, false) ||
-        Path.isWindowsAbsolutePath(pathString, true)) {
-      inferredSchemeFromPath = true;
-    }
-
-    if (Path.WINDOWS && (pathString.indexOf('\\') != -1)) {
-      restoreBackslashes = true;
+    if (Path.WINDOWS) {
+      ValidateWindowsPath(pathString);
     }
   }
 
@@ -409,17 +444,11 @@ public class PathData implements Comparable<PathData> {
     // Drop the scheme if it was inferred to ensure fidelity between
     // the input and output path strings.
     if ((scheme == null) || (inferredSchemeFromPath)) {
-
       if (Path.isWindowsAbsolutePath(decodedRemainder, true)) {
         // Strip the leading '/' added in stringToUri so users see a valid
         // Windows path.
         decodedRemainder = decodedRemainder.substring(1);
       }
-
-      if (restoreBackslashes) {
-        decodedRemainder = decodedRemainder.replace('\\', '/');
-      }
-
       return decodedRemainder;
     } else {
       StringBuilder buffer = new StringBuilder();
@@ -442,13 +471,56 @@ public class PathData implements Comparable<PathData> {
     return ((LocalFileSystem)fs).pathToFile(path);
   }
 
+  /** Normalize the given Windows path string. This does the following:
+   *    1. Adds "file:" scheme for absolute paths.
+   *    2. Ensures the scheme-specific part starts with '/' per RFC2396.
+   *    3. Replaces backslash path separators with forward slashes.
+   *    @param pathString Path string supplied by the user.
+   *    @return normalized absolute path string. Returns the input string
+   *            if it is not a Windows absolute path.
+   */
+  private static String normalizeWindowsPath(String pathString)
+  throws IOException
+  {
+    if (!Path.WINDOWS) {
+      return pathString;
+    }
+
+    boolean slashed =
+        ((pathString.length() >= 1) && (pathString.charAt(0) == '/'));
+
+    // Is it a backslash-separated absolute path?
+    if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
+      // Forward slashes disallowed in a backslash-separated path.
+      if (pathString.indexOf('/') != -1) {
+        throw new IOException("Invalid path string " + pathString);
+      }
+
+      pathString = pathString.replace('\\', '/');
+      return "file:" + (slashed ? "" : "/") + pathString;
+    }
+
+    // Is it a forward slash-separated absolute path?
+    if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
+      return "file:" + (slashed ? "" : "/") + pathString;
+    }
+
+    // Is it a backslash-separated relative file path (no scheme and
+    // no drive-letter specifier)?
+    if ((pathString.indexOf(':') == -1) && (pathString.indexOf('\\') != -1)) {
+      pathString = pathString.replace('\\', '/');
+    }
+
+    return pathString;
+  }
+
   /** Construct a URI from a String with unescaped special characters
-   *  that have non-standard sematics. e.g. /, ?, #. A custom parsing
-   *  is needed to prevent misbihaviors.
+   *  that have non-standard semantics. e.g. /, ?, #. A custom parsing
+   *  is needed to prevent misbehavior.
    *  @param pathString The input path in string form
    *  @return URI
    */
-  private static URI stringToUri(String pathString) {
+  private static URI stringToUri(String pathString) throws IOException {
     // We can't use 'new URI(String)' directly. Since it doesn't do quoting
     // internally, the internal parser may fail or break the string at wrong
     // places. Use of multi-argument ctors will quote those chars for us,
@@ -457,44 +529,28 @@ public class PathData implements Comparable<PathData> {
     // parse uri components
     String scheme = null;
     String authority = null;
-
     int start = 0;
 
-    if (Path.WINDOWS) {
-      // Convert backslashes to prevent URI from escaping them.
-      pathString = pathString.replace('\\', '/');
-    }
-
-    if (Path.isWindowsAbsolutePath(pathString, false)) {
-      // So we don't attempt to parse the drive specifier as a scheme.
-      // Prefix a '/' to the scheme-specific part per RFC2936.
-      scheme = "file";
-      pathString = "/" + pathString;
-    } else if (Path.isWindowsAbsolutePath(pathString, true)){
-      // So we don't attempt to parse the drive specifier as a scheme.
-      // The scheme-specific part already begins with a '/'.
-      scheme = "file";
-    } else {
-      // parse uri scheme, if any
-      int colon = pathString.indexOf(':');
-      int slash = pathString.indexOf('/');
-      if (colon > 0 && (slash == colon +1)) {
-        // has a non zero-length scheme
-        scheme = pathString.substring(0, colon);
-        start = colon + 1;
-      }
+    pathString = normalizeWindowsPath(pathString);
 
-      // parse uri authority, if any
-      if (pathString.startsWith("//", start) &&
-          (pathString.length()-start > 2)) {
-        start += 2;
-        int nextSlash = pathString.indexOf('/', start);
-        int authEnd = nextSlash > 0 ? nextSlash : pathString.length();
-        authority = pathString.substring(start, authEnd);
-        start = authEnd;
-      }
+    // parse uri scheme, if any
+    int colon = pathString.indexOf(':');
+    int slash = pathString.indexOf('/');
+    if (colon > 0 && (slash == colon +1)) {
+      // has a non zero-length scheme
+      scheme = pathString.substring(0, colon);
+      start = colon + 1;
     }
 
+    // parse uri authority, if any
+    if (pathString.startsWith("//", start) &&
+        (pathString.length()-start > 2)) {
+      start += 2;
+      int nextSlash = pathString.indexOf('/', start);
+      int authEnd = nextSlash > 0 ? nextSlash : pathString.length();
+      authority = pathString.substring(start, authEnd);
+      start = authEnd;
+    }
     // uri path is the rest of the string. ? or # are not interpreted,
     // but any occurrence of them will be quoted by the URI ctor.
     String path = pathString.substring(start, pathString.length());

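Note: taken together, normalizeWindowsPath lets stringToUri turn a raw Windows path into a file: URI before the generic scheme/authority parsing runs. Input/output pairs implied by the code above (illustrative, on Windows; elsewhere the input passes through unchanged):

    // "c:\tmp\file"  -> "file:/c:/tmp/file"  backslash-separated absolute
    // "c:/tmp/file"  -> "file:/c:/tmp/file"  forward-slash absolute
    // "a\b\c"        -> "a/b/c"              relative, no drive specifier
    // "hdfs://nn/f"  -> "hdfs://nn/f"        looks like a URI, left alone
    // "c:\tmp/file"  -> IOException          mixed separators rejected
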
+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java

@@ -203,8 +203,8 @@ public class ReadaheadPool {
       // It's also possible that we'll end up requesting readahead on some
       // other FD, which may be wasted work, but won't cause a problem.
       try {
-        NativeIO.posixFadviseIfPossible(fd, off, len,
-            NativeIO.POSIX_FADV_WILLNEED);
+        NativeIO.POSIX.posixFadviseIfPossible(fd, off, len,
+            NativeIO.POSIX.POSIX_FADV_WILLNEED);
       } catch (IOException ioe) {
         if (canceled) {
           // no big deal - the reader canceled the request and closed

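Note: callers now reach the syscall through the nested POSIX class, and posixFadviseIfPossible keeps the degrade-gracefully behavior, so on platforms without posix_fadvise (including Windows) the hint is a no-op. A minimal usage sketch:

    import java.io.FileInputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class FadviseDemo {
      // Hint sequential access for a whole file (len 0 = to end of file).
      // Silently does nothing where the syscall is unavailable.
      static void hintSequential(String path) throws IOException {
        FileInputStream in = new FileInputStream(path);
        try {
          NativeIO.POSIX.posixFadviseIfPossible(in.getFD(), 0, 0,
              NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
        } finally {
          in.close();
        }
      }
    }
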
+ 17 - 21
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java

@@ -22,6 +22,7 @@ import java.io.FileDescriptor;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -30,7 +31,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.nativeio.Errno;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.io.nativeio.NativeIOException;
-import org.apache.hadoop.io.nativeio.NativeIO.Stat;
+import org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -120,7 +121,7 @@ public class SecureIOUtils {
     FileInputStream fis = new FileInputStream(f);
     boolean success = false;
     try {
-      Stat stat = NativeIO.getFstat(fis.getFD());
+      Stat stat = NativeIO.POSIX.getFstat(fis.getFD());
       checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
           expectedGroup);
       success = true;
@@ -166,35 +167,30 @@ public class SecureIOUtils {
     if (skipSecurity) {
       return insecureCreateForWrite(f, permissions);
     } else {
-      // Use the native wrapper around open(2)
-      try {
-        FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
-          NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
-          permissions);
-        return new FileOutputStream(fd);
-      } catch (NativeIOException nioe) {
-        if (nioe.getErrno() == Errno.EEXIST) {
-          throw new AlreadyExistsException(nioe);
-        }
-        throw nioe;
-      }
+      return NativeIO.getCreateForWriteFileOutputStream(f, permissions);
     }
   }
 
   private static void checkStat(File f, String owner, String group, 
       String expectedOwner, 
       String expectedGroup) throws IOException {
+    boolean success = true;
     if (expectedOwner != null &&
         !expectedOwner.equals(owner)) {
-      throw new IOException(
-        "Owner '" + owner + "' for path " + f + " did not match " +
-        "expected owner '" + expectedOwner + "'");
+      if (Path.WINDOWS) {
+        UserGroupInformation ugi =
+            UserGroupInformation.createRemoteUser(expectedOwner);
+        final String adminsGroupString = "Administrators";
+        success = owner.equals(adminsGroupString)
+            && Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString);
+      } else {
+        success = false;
+      }
     }
-    if (expectedGroup != null &&
-        !expectedGroup.equals(group)) {
+    if (!success) {
       throw new IOException(
-        "Group '" + group + "' for path " + f + " did not match " +
-        "expected group '" + expectedGroup + "'");
+          "Owner '" + owner + "' for path " + f + " did not match " +
+              "expected owner '" + expectedOwner + "'");
     }
   }
 
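Note: the relaxed owner check reflects a Windows quirk: files created by a member of the Administrators group are owned by the group rather than the individual user. A rough restatement of the acceptance rule encoded above (names and structure are illustrative, and the real code only applies the group case on Windows):

    import java.util.Arrays;
    import org.apache.hadoop.security.UserGroupInformation;

    public class OwnerCheckDemo {
      // Accept an exact owner match, or an "Administrators"-owned file
      // when the expected owner is a member of that group.
      static boolean ownerMatches(String owner, String expectedOwner) {
        if (expectedOwner.equals(owner)) {
          return true;
        }
        UserGroupInformation ugi =
            UserGroupInformation.createRemoteUser(expectedOwner);
        return owner.equals("Administrators")
            && Arrays.asList(ugi.getGroupNames()).contains("Administrators");
      }
    }
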
+ 468 - 205
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java

@@ -17,8 +17,12 @@
  */
 package org.apache.hadoop.io.nativeio;
 
+import java.io.File;
 import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -26,10 +30,13 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.apache.hadoop.util.Shell;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+
 /**
  * JNI wrappers for various native IO-related calls not available in Java.
  * These functions should generally be used alongside a fallback to another
@@ -38,81 +45,345 @@ import org.apache.commons.logging.LogFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class NativeIO {
-  // Flags for open() call from bits/fcntl.h
-  public static final int O_RDONLY   =    00;
-  public static final int O_WRONLY   =    01;
-  public static final int O_RDWR     =    02;
-  public static final int O_CREAT    =  0100;
-  public static final int O_EXCL     =  0200;
-  public static final int O_NOCTTY   =  0400;
-  public static final int O_TRUNC    = 01000;
-  public static final int O_APPEND   = 02000;
-  public static final int O_NONBLOCK = 04000;
-  public static final int O_SYNC   =  010000;
-  public static final int O_ASYNC  =  020000;
-  public static final int O_FSYNC = O_SYNC;
-  public static final int O_NDELAY = O_NONBLOCK;
-
-  // Flags for posix_fadvise() from bits/fcntl.h
-  /* No further special treatment.  */
-  public static final int POSIX_FADV_NORMAL = 0; 
-  /* Expect random page references.  */
-  public static final int POSIX_FADV_RANDOM = 1; 
-  /* Expect sequential page references.  */
-  public static final int POSIX_FADV_SEQUENTIAL = 2; 
-  /* Will need these pages.  */
-  public static final int POSIX_FADV_WILLNEED = 3; 
-  /* Don't need these pages.  */
-  public static final int POSIX_FADV_DONTNEED = 4; 
-  /* Data will be accessed once.  */
-  public static final int POSIX_FADV_NOREUSE = 5; 
-
-
-  /* Wait upon writeout of all pages
-     in the range before performing the
-     write.  */
-  public static final int SYNC_FILE_RANGE_WAIT_BEFORE = 1;
-  /* Initiate writeout of all those
-     dirty pages in the range which are
-     not presently under writeback.  */
-  public static final int SYNC_FILE_RANGE_WRITE = 2;
-
-  /* Wait upon writeout of all pages in
-     the range after performing the
-     write.  */
-  public static final int SYNC_FILE_RANGE_WAIT_AFTER = 4;
+  public static class POSIX {
+    // Flags for open() call from bits/fcntl.h
+    public static final int O_RDONLY   =    00;
+    public static final int O_WRONLY   =    01;
+    public static final int O_RDWR     =    02;
+    public static final int O_CREAT    =  0100;
+    public static final int O_EXCL     =  0200;
+    public static final int O_NOCTTY   =  0400;
+    public static final int O_TRUNC    = 01000;
+    public static final int O_APPEND   = 02000;
+    public static final int O_NONBLOCK = 04000;
+    public static final int O_SYNC   =  010000;
+    public static final int O_ASYNC  =  020000;
+    public static final int O_FSYNC = O_SYNC;
+    public static final int O_NDELAY = O_NONBLOCK;
+
+    // Flags for posix_fadvise() from bits/fcntl.h
+    /* No further special treatment.  */
+    public static final int POSIX_FADV_NORMAL = 0;
+    /* Expect random page references.  */
+    public static final int POSIX_FADV_RANDOM = 1;
+    /* Expect sequential page references.  */
+    public static final int POSIX_FADV_SEQUENTIAL = 2;
+    /* Will need these pages.  */
+    public static final int POSIX_FADV_WILLNEED = 3;
+    /* Don't need these pages.  */
+    public static final int POSIX_FADV_DONTNEED = 4;
+    /* Data will be accessed once.  */
+    public static final int POSIX_FADV_NOREUSE = 5;
+
+
+    /* Wait upon writeout of all pages
+       in the range before performing the
+       write.  */
+    public static final int SYNC_FILE_RANGE_WAIT_BEFORE = 1;
+    /* Initiate writeout of all those
+       dirty pages in the range which are
+       not presently under writeback.  */
+    public static final int SYNC_FILE_RANGE_WRITE = 2;
+
+    /* Wait upon writeout of all pages in
+       the range after performing the
+       write.  */
+    public static final int SYNC_FILE_RANGE_WAIT_AFTER = 4;
+
+    private static final Log LOG = LogFactory.getLog(NativeIO.class);
+
+    private static boolean nativeLoaded = false;
+    private static boolean fadvisePossible = true;
+    private static boolean syncFileRangePossible = true;
+
+    static final String WORKAROUND_NON_THREADSAFE_CALLS_KEY =
+      "hadoop.workaround.non.threadsafe.getpwuid";
+    static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = false;
+
+    private static long cacheTimeout = -1;
+
+    static {
+      if (NativeCodeLoader.isNativeCodeLoaded()) {
+        try {
+          Configuration conf = new Configuration();
+          workaroundNonThreadSafePasswdCalls = conf.getBoolean(
+            WORKAROUND_NON_THREADSAFE_CALLS_KEY,
+            WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT);
+
+          initNative();
+          nativeLoaded = true;
+
+          cacheTimeout = conf.getLong(
+            CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
+            CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) *
+            1000;
+          LOG.debug("Initialized cache for IDs to User/Group mapping with a " +
+            " cache timeout of " + cacheTimeout/1000 + " seconds.");
+
+        } catch (Throwable t) {
+          // This can happen if the user has an older version of libhadoop.so
+          // installed - in this case we can continue without native IO
+          // after warning
+          LOG.error("Unable to initialize NativeIO libraries", t);
+        }
+      }
+    }
 
-  private static final Log LOG = LogFactory.getLog(NativeIO.class);
+    /**
+     * Return true if the JNI-based native IO extensions are available.
+     */
+    public static boolean isAvailable() {
+      return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
+    }
+
+    /** Wrapper around open(2) */
+    public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
+    /** Wrapper around fstat(2) */
+    private static native Stat fstat(FileDescriptor fd) throws IOException;
+
+    /** Native chmod implementation. On UNIX, it is a wrapper around chmod(2) */
+    private static native void chmodImpl(String path, int mode) throws IOException;
+
+    public static void chmod(String path, int mode) throws IOException {
+      if (!Shell.WINDOWS) {
+        chmodImpl(path, mode);
+      } else {
+        try {
+          chmodImpl(path, mode);
+        } catch (NativeIOException nioe) {
+          if (nioe.getErrorCode() == 3) {
+            throw new NativeIOException("No such file or directory",
+                Errno.ENOENT);
+          } else {
+            LOG.warn(String.format("NativeIO.chmod error (%d): %s",
+                nioe.getErrorCode(), nioe.getMessage()));
+            throw new NativeIOException("Unknown error", Errno.UNKNOWN);
+          }
+        }
+      }
+    }
+
+    /** Wrapper around posix_fadvise(2) */
+    static native void posix_fadvise(
+      FileDescriptor fd, long offset, long len, int flags) throws NativeIOException;
+
+    /** Wrapper around sync_file_range(2) */
+    static native void sync_file_range(
+      FileDescriptor fd, long offset, long nbytes, int flags) throws NativeIOException;
+
+    /**
+     * Call posix_fadvise on the given file descriptor. See the manpage
+     * for this syscall for more information. On systems where this
+     * call is not available, does nothing.
+     *
+     * @throws NativeIOException if there is an error with the syscall
+     */
+    public static void posixFadviseIfPossible(
+        FileDescriptor fd, long offset, long len, int flags)
+        throws NativeIOException {
+      if (nativeLoaded && fadvisePossible) {
+        try {
+          posix_fadvise(fd, offset, len, flags);
+        } catch (UnsupportedOperationException uoe) {
+          fadvisePossible = false;
+        } catch (UnsatisfiedLinkError ule) {
+          fadvisePossible = false;
+        }
+      }
+    }
+
+    /**
+     * Call sync_file_range on the given file descriptor. See the manpage
+     * for this syscall for more information. On systems where this
+     * call is not available, does nothing.
+     *
+     * @throws NativeIOException if there is an error with the syscall
+     */
+    public static void syncFileRangeIfPossible(
+        FileDescriptor fd, long offset, long nbytes, int flags)
+        throws NativeIOException {
+      if (nativeLoaded && syncFileRangePossible) {
+        try {
+          sync_file_range(fd, offset, nbytes, flags);
+        } catch (UnsupportedOperationException uoe) {
+          syncFileRangePossible = false;
+        } catch (UnsatisfiedLinkError ule) {
+          syncFileRangePossible = false;
+        }
+      }
+    }
+
+    /** Linux only methods used for getOwner() implementation */
+    private static native long getUIDforFDOwnerforOwner(FileDescriptor fd) throws IOException;
+    private static native String getUserName(long uid) throws IOException;
+
+    /**
+     * Result type of the fstat call
+     */
+    public static class Stat {
+      private int ownerId, groupId;
+      private String owner, group;
+      private int mode;
+
+      // Mode constants
+      public static final int S_IFMT = 0170000;      /* type of file */
+      public static final int   S_IFIFO  = 0010000;  /* named pipe (fifo) */
+      public static final int   S_IFCHR  = 0020000;  /* character special */
+      public static final int   S_IFDIR  = 0040000;  /* directory */
+      public static final int   S_IFBLK  = 0060000;  /* block special */
+      public static final int   S_IFREG  = 0100000;  /* regular */
+      public static final int   S_IFLNK  = 0120000;  /* symbolic link */
+      public static final int   S_IFSOCK = 0140000;  /* socket */
+      public static final int   S_IFWHT  = 0160000;  /* whiteout */
+      public static final int S_ISUID = 0004000;  /* set user id on execution */
+      public static final int S_ISGID = 0002000;  /* set group id on execution */
+      public static final int S_ISVTX = 0001000;  /* save swapped text even after use */
+      public static final int S_IRUSR = 0000400;  /* read permission, owner */
+      public static final int S_IWUSR = 0000200;  /* write permission, owner */
+      public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
+
+      Stat(int ownerId, int groupId, int mode) {
+        this.ownerId = ownerId;
+        this.groupId = groupId;
+        this.mode = mode;
+      }
+
+      @Override
+      public String toString() {
+        return "Stat(owner='" + owner + "', group='" + group + "'" +
+          ", mode=" + mode + ")";
+      }
+
+      public String getOwner() {
+        return owner;
+      }
+      public String getGroup() {
+        return group;
+      }
+      public int getMode() {
+        return mode;
+      }
+    }
+
+    /**
+     * Returns the file stat for a file descriptor.
+     *
+     * @param fd file descriptor.
+     * @return the file descriptor file stat.
+     * @throws IOException thrown if there was an IO error while obtaining the file stat.
+     */
+    public static Stat getFstat(FileDescriptor fd) throws IOException {
+      Stat stat = fstat(fd);
+      stat.owner = getName(IdCache.USER, stat.ownerId);
+      stat.group = getName(IdCache.GROUP, stat.groupId);
+      return stat;
+    }
+
+    private static String getName(IdCache domain, int id) throws IOException {
+      Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
+        ? USER_ID_NAME_CACHE : GROUP_ID_NAME_CACHE;
+      String name;
+      CachedName cachedName = idNameCache.get(id);
+      long now = System.currentTimeMillis();
+      if (cachedName != null && (cachedName.timestamp + cacheTimeout) > now) {
+        name = cachedName.name;
+      } else {
+        name = (domain == IdCache.USER) ? getUserName(id) : getGroupName(id);
+        if (LOG.isDebugEnabled()) {
+          String type = (domain == IdCache.USER) ? "UserName" : "GroupName";
+          LOG.debug("Got " + type + " " + name + " for ID " + id +
+            " from the native implementation");
+        }
+        cachedName = new CachedName(name, now);
+        idNameCache.put(id, cachedName);
+      }
+      return name;
+    }
+
+    static native String getUserName(int uid) throws IOException;
+    static native String getGroupName(int uid) throws IOException;
+
+    private static class CachedName {
+      final long timestamp;
+      final String name;
+
+      public CachedName(String name, long timestamp) {
+        this.name = name;
+        this.timestamp = timestamp;
+      }
+    }
+
+    private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
+      new ConcurrentHashMap<Integer, CachedName>();
+
+    private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
+      new ConcurrentHashMap<Integer, CachedName>();
+
+    private enum IdCache { USER, GROUP }
+  }
 
-  private static boolean nativeLoaded = false;
   private static boolean workaroundNonThreadSafePasswdCalls = false;
-  private static boolean fadvisePossible = true;
-  private static boolean syncFileRangePossible = true;
 
-  static final String WORKAROUND_NON_THREADSAFE_CALLS_KEY =
-    "hadoop.workaround.non.threadsafe.getpwuid";
-  static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = false;
 
-  private static long cacheTimeout = -1;
+  public static class Windows {
+    // Flags for CreateFile() call on Windows
+    public static final long GENERIC_READ = 0x80000000L;
+    public static final long GENERIC_WRITE = 0x40000000L;
+
+    public static final long FILE_SHARE_READ = 0x00000001L;
+    public static final long FILE_SHARE_WRITE = 0x00000002L;
+    public static final long FILE_SHARE_DELETE = 0x00000004L;
+
+    public static final long CREATE_NEW = 1;
+    public static final long CREATE_ALWAYS = 2;
+    public static final long OPEN_EXISTING = 3;
+    public static final long OPEN_ALWAYS = 4;
+    public static final long TRUNCATE_EXISTING = 5;
+
+    public static final long FILE_BEGIN = 0;
+    public static final long FILE_CURRENT = 1;
+    public static final long FILE_END = 2;
+
+    /** Wrapper around CreateFile() on Windows */
+    public static native FileDescriptor createFile(String path,
+        long desiredAccess, long shareMode, long creationDisposition)
+        throws IOException;
+
+    /** Wrapper around SetFilePointer() on Windows */
+    public static native long setFilePointer(FileDescriptor fd,
+        long distanceToMove, long moveMethod) throws IOException;
+
+    /** Windows only methods used for getOwner() implementation */
+    private static native String getOwner(FileDescriptor fd) throws IOException;
+
+    /** Windows only method used for getting the file length */
+    public static native long getLengthFollowSymlink(
+        String path) throws IOException;
+
+    static {
+      if (NativeCodeLoader.isNativeCodeLoaded()) {
+        try {
+          initNative();
+          nativeLoaded = true;
+        } catch (Throwable t) {
+          // This can happen if the user has an older version of libhadoop.so
+          // installed - in this case we can continue without native IO
+          // after warning
+          LOG.error("Unable to initialize NativeIO libraries", t);
+        }
+      }
+    }
+  }
+
+  private static final Log LOG = LogFactory.getLog(NativeIO.class);
+
+  private static boolean nativeLoaded = false;
 
   static {
     if (NativeCodeLoader.isNativeCodeLoaded()) {
       try {
-        Configuration conf = new Configuration();
-        workaroundNonThreadSafePasswdCalls = conf.getBoolean(
-          WORKAROUND_NON_THREADSAFE_CALLS_KEY,
-          WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT);
-
         initNative();
         nativeLoaded = true;
-
-        cacheTimeout = conf.getLong(
-          CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
-          CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) *
-          1000;
-        LOG.debug("Initialized cache for IDs to User/Group mapping with a" +
-          " cache timeout of " + cacheTimeout/1000 + " seconds.");
-
       } catch (Throwable t) {
         // This can happen if the user has an older version of libhadoop.so
         // installed - in this case we can continue without native IO
@@ -129,168 +400,160 @@ public class NativeIO {
     return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
   }
 
-  /** Wrapper around open(2) */
-  public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
-  /** Wrapper around fstat(2) */
-  private static native Stat fstat(FileDescriptor fd) throws IOException;
-  /** Wrapper around chmod(2) */
-  public static native void chmod(String path, int mode) throws IOException;
-
-  /** Wrapper around posix_fadvise(2) */
-  static native void posix_fadvise(
-    FileDescriptor fd, long offset, long len, int flags) throws NativeIOException;
-
-  /** Wrapper around sync_file_range(2) */
-  static native void sync_file_range(
-    FileDescriptor fd, long offset, long nbytes, int flags) throws NativeIOException;
-
   /** Initialize the JNI method ID and class ID cache */
   private static native void initNative();
 
-  /**
-   * Call posix_fadvise on the given file descriptor. See the manpage
-   * for this syscall for more information. On systems where this
-   * call is not available, does nothing.
-   *
-   * @throws NativeIOException if there is an error with the syscall
-   */
-  public static void posixFadviseIfPossible(
-      FileDescriptor fd, long offset, long len, int flags)
-      throws NativeIOException {
-    if (nativeLoaded && fadvisePossible) {
-      try {
-        posix_fadvise(fd, offset, len, flags);
-      } catch (UnsupportedOperationException uoe) {
-        fadvisePossible = false;
-      } catch (UnsatisfiedLinkError ule) {
-        fadvisePossible = false;
-      }
+  private static class CachedUid {
+    final long timestamp;
+    final String username;
+    public CachedUid(String username, long timestamp) {
+      this.timestamp = timestamp;
+      this.username = username;
     }
   }
-
-  /**
-   * Call sync_file_range on the given file descriptor. See the manpage
-   * for this syscall for more information. On systems where this
-   * call is not available, does nothing.
-   *
-   * @throws NativeIOException if there is an error with the syscall
-   */
-  public static void syncFileRangeIfPossible(
-      FileDescriptor fd, long offset, long nbytes, int flags)
-      throws NativeIOException {
-    if (nativeLoaded && syncFileRangePossible) {
-      try {
-        sync_file_range(fd, offset, nbytes, flags);
-      } catch (UnsupportedOperationException uoe) {
-        syncFileRangePossible = false;
-      } catch (UnsatisfiedLinkError ule) {
-        syncFileRangePossible = false;
+  private static final Map<Long, CachedUid> uidCache =
+      new ConcurrentHashMap<Long, CachedUid>();
+  private static long cacheTimeout;
+  private static boolean initialized = false;
+
+  public static String getOwner(FileDescriptor fd) throws IOException {
+    ensureInitialized();
+    if (Shell.WINDOWS) {
+      String owner = Windows.getOwner(fd);
+      int i = owner.indexOf('\\');
+      if (i != -1)
+        owner = owner.substring(i + 1);
+      return owner;
+    } else {
+      long uid = POSIX.getUIDforFDOwnerforOwner(fd);
+      CachedUid cUid = uidCache.get(uid);
+      long now = System.currentTimeMillis();
+      if (cUid != null && (cUid.timestamp + cacheTimeout) > now) {
+        return cUid.username;
       }
+      String user = POSIX.getUserName(uid);
+      LOG.info("Got UserName " + user + " for UID " + uid
+          + " from the native implementation");
+      cUid = new CachedUid(user, now);
+      uidCache.put(uid, cUid);
+      return user;
     }
   }
 
   /**
-   * Result type of the fstat call
+   * Create a FileInputStream that shares delete permission on the
+   * file opened, i.e. other processes can delete the file while the
+   * FileInputStream is reading it. Only the Windows implementation uses
+   * the native interface.
    */
-  public static class Stat {
-    private int ownerId, groupId;
-    private String owner, group;
-    private int mode;
-
-    // Mode constants
-    public static final int S_IFMT = 0170000;      /* type of file */
-    public static final int   S_IFIFO  = 0010000;  /* named pipe (fifo) */
-    public static final int   S_IFCHR  = 0020000;  /* character special */
-    public static final int   S_IFDIR  = 0040000;  /* directory */
-    public static final int   S_IFBLK  = 0060000;  /* block special */
-    public static final int   S_IFREG  = 0100000;  /* regular */
-    public static final int   S_IFLNK  = 0120000;  /* symbolic link */
-    public static final int   S_IFSOCK = 0140000;  /* socket */
-    public static final int   S_IFWHT  = 0160000;  /* whiteout */
-    public static final int S_ISUID = 0004000;  /* set user id on execution */
-    public static final int S_ISGID = 0002000;  /* set group id on execution */
-    public static final int S_ISVTX = 0001000;  /* save swapped text even after use */
-    public static final int S_IRUSR = 0000400;  /* read permission, owner */
-    public static final int S_IWUSR = 0000200;  /* write permission, owner */
-    public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
-
-    Stat(int ownerId, int groupId, int mode) {
-      this.ownerId = ownerId;
-      this.groupId = groupId;
-      this.mode = mode;
-    }
-
-    @Override
-    public String toString() {
-      return "Stat(owner='" + owner + "', group='" + group + "'" +
-        ", mode=" + mode + ")";
-    }
-
-    public String getOwner() {
-      return owner;
-    }
-    public String getGroup() {
-      return group;
-    }
-    public int getMode() {
-      return mode;
+  public static FileInputStream getShareDeleteFileInputStream(File f)
+      throws IOException {
+    if (!Shell.WINDOWS) {
+      // On Linux the default FileInputStream shares delete permission
+      // on the file opened.
+      //
+      return new FileInputStream(f);
+    } else {
+      // Use Windows native interface to create a FileInputStream that
+      // shares delete permission on the file opened.
+      //
+      FileDescriptor fd = Windows.createFile(
+          f.getAbsolutePath(),
+          Windows.GENERIC_READ,
+          Windows.FILE_SHARE_READ |
+              Windows.FILE_SHARE_WRITE |
+              Windows.FILE_SHARE_DELETE,
+          Windows.OPEN_EXISTING);
+      return new FileInputStream(fd);
     }
   }
 
-  static native String getUserName(int uid) throws IOException;
-
-  static native String getGroupName(int uid) throws IOException;
-
-  private static class CachedName {
-    final long timestamp;
-    final String name;
-
-    public CachedName(String name, long timestamp) {
-      this.name = name;
-      this.timestamp = timestamp;
+  /**
+   * Create a FileInputStream that shares delete permission on the
+   * file opened at a given offset, i.e. other processes can delete
+   * the file while the FileInputStream is reading it. Only the Windows
+   * implementation uses the native interface.
+   */
+  public static FileInputStream getShareDeleteFileInputStream(File f, long seekOffset)
+      throws IOException {
+    if (!Shell.WINDOWS) {
+      RandomAccessFile rf = new RandomAccessFile(f, "r");
+      if (seekOffset > 0) {
+        rf.seek(seekOffset);
+      }
+      return new FileInputStream(rf.getFD());
+    } else {
+      // Use Windows native interface to create a FileInputStream that
+      // shares delete permission on the file opened, and set it to the
+      // given offset.
+      //
+      FileDescriptor fd = NativeIO.Windows.createFile(
+          f.getAbsolutePath(),
+          NativeIO.Windows.GENERIC_READ,
+          NativeIO.Windows.FILE_SHARE_READ |
+              NativeIO.Windows.FILE_SHARE_WRITE |
+              NativeIO.Windows.FILE_SHARE_DELETE,
+          NativeIO.Windows.OPEN_EXISTING);
+      if (seekOffset > 0)
+        NativeIO.Windows.setFilePointer(fd, seekOffset, NativeIO.Windows.FILE_BEGIN);
+      return new FileInputStream(fd);
     }
   }
 
-  private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
-    new ConcurrentHashMap<Integer, CachedName>();
-
-  private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
-    new ConcurrentHashMap<Integer, CachedName>();
-
-  private enum IdCache { USER, GROUP }
-
-  private static String getName(IdCache domain, int id) throws IOException {
-    Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
-      ? USER_ID_NAME_CACHE : GROUP_ID_NAME_CACHE;
-    String name;
-    CachedName cachedName = idNameCache.get(id);
-    long now = System.currentTimeMillis();
-    if (cachedName != null && (cachedName.timestamp + cacheTimeout) > now) {
-      name = cachedName.name;
+  /**
+   * Create the specified File for write access, ensuring that it does not exist.
+   * @param f the file that we want to create
+   * @param permissions the permissions to set on the file (if security is enabled)
+   *
+   * @throws AlreadyExistsException if the file already exists
+   * @throws IOException if any other error occurred
+   */
+  public static FileOutputStream getCreateForWriteFileOutputStream(File f, int permissions)
+      throws IOException {
+    if (!Shell.WINDOWS) {
+      // Use the native wrapper around open(2)
+      try {
+        FileDescriptor fd = NativeIO.POSIX.open(f.getAbsolutePath(),
+            NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT
+                | NativeIO.POSIX.O_EXCL, permissions);
+        return new FileOutputStream(fd);
+      } catch (NativeIOException nioe) {
+        if (nioe.getErrno() == Errno.EEXIST) {
+          throw new AlreadyExistsException(nioe);
+        }
+        throw nioe;
+      }
     } else {
-      name = (domain == IdCache.USER) ? getUserName(id) : getGroupName(id);
-      if (LOG.isDebugEnabled()) {
-        String type = (domain == IdCache.USER) ? "UserName" : "GroupName";
-        LOG.debug("Got " + type + " " + name + " for ID " + id +
-          " from the native implementation");
+      // Use the Windows native APIs to create equivalent FileOutputStream
+      try {
+        FileDescriptor fd = NativeIO.Windows.createFile(f.getCanonicalPath(),
+            NativeIO.Windows.GENERIC_WRITE,
+            NativeIO.Windows.FILE_SHARE_DELETE
+                | NativeIO.Windows.FILE_SHARE_READ
+                | NativeIO.Windows.FILE_SHARE_WRITE,
+            NativeIO.Windows.CREATE_NEW);
+        NativeIO.POSIX.chmod(f.getCanonicalPath(), permissions);
+        return new FileOutputStream(fd);
+      } catch (NativeIOException nioe) {
+        if (nioe.getErrorCode() == 80) {
+          // ERROR_FILE_EXISTS
+          // 80 (0x50)
+          // The file exists
+          throw new AlreadyExistsException(nioe);
+        }
+        throw nioe;
       }
-      cachedName = new CachedName(name, now);
-      idNameCache.put(id, cachedName);
     }
-    return name;
   }
 
-  /**
-   * Returns the file stat for a file descriptor.
-   *
-   * @param fd file descriptor.
-   * @return the file descriptor file stat.
-   * @throws IOException thrown if there was an IO error while obtaining the file stat.
-   */
-  public static Stat getFstat(FileDescriptor fd) throws IOException {
-    Stat stat = fstat(fd);
-    stat.owner = getName(IdCache.USER, stat.ownerId);
-    stat.group = getName(IdCache.GROUP, stat.groupId);
-    return stat;
+  private synchronized static void ensureInitialized() {
+    if (!initialized) {
+      cacheTimeout =
+          new Configuration().getLong("hadoop.security.uid.cache.secs",
+              4*60*60) * 1000;
+      LOG.info("Initialized cache for UID to User mapping with a cache" +
+          " timeout of " + cacheTimeout/1000 + " seconds.");
+      initialized = true;
+    }
   }
 }
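
Illustrative usage of the new Java-side helpers (not part of the patch itself): a minimal sketch, assuming the native library loaded successfully; the file name and offset are made up for the example.

    // Open a file so other processes may delete it while we read; on Windows
    // this routes through NativeIO.Windows.createFile with FILE_SHARE_DELETE,
    // elsewhere it is an ordinary FileInputStream.
    File f = new File("example.dat");                       // hypothetical file
    FileInputStream in = NativeIO.getShareDeleteFileInputStream(f, 0);
    try {
      String owner = NativeIO.getOwner(in.getFD());         // UID/SID -> name, cached
      System.out.println("File owner: " + owner);
    } finally {
      in.close();
    }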

+ 25 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java

@@ -18,20 +18,40 @@
 package org.apache.hadoop.io.nativeio;
 
 import java.io.IOException;
+import org.apache.hadoop.util.Shell;
+
 
 /**
  * An exception generated by a call to the native IO code.
  *
- * These exceptions simply wrap <i>errno</i> result codes.
+ * These exceptions simply wrap <i>errno</i> result codes on Linux,
+ * or the System Error Code on Windows.
  */
 public class NativeIOException extends IOException {
   private static final long serialVersionUID = 1L;
 
   private Errno errno;
 
+  // Java has no unsigned primitive error code. Use a signed 32-bit
+  // integer to hold the unsigned 32-bit integer.
+  private int errorCode;
+
   public NativeIOException(String msg, Errno errno) {
     super(msg);
     this.errno = errno;
+    // Windows error code is always set to ERROR_SUCCESS on Linux,
+    // i.e. no failure on Windows
+    this.errorCode = 0;
+  }
+
+  public NativeIOException(String msg, int errorCode) {
+    super(msg);
+    this.errorCode = errorCode;
+    this.errno = Errno.UNKNOWN;
+  }
+
+  public long getErrorCode() {
+    return errorCode;
   }
 
   public Errno getErrno() {
@@ -40,8 +60,10 @@ public class NativeIOException extends IOException {
 
   @Override
   public String toString() {
-    return errno.toString() + ": " + super.getMessage();
+    if (Shell.WINDOWS)
+      return errorCode + ": " + super.getMessage();
+    else
+      return errno.toString() + ": " + super.getMessage();
   }
 }
 
-
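
Illustrative error handling (not part of the patch): a sketch of how a caller might branch on the platform-specific detail now carried by NativeIOException; the file name and permission bits are assumptions for the example.

    try {
      // Fails with AlreadyExistsException if the file is already there.
      FileOutputStream out =
          NativeIO.getCreateForWriteFileOutputStream(new File("out.bin"), 0644);
      out.close();
    } catch (NativeIOException nioe) {
      if (Shell.WINDOWS) {
        // Windows System Error Code, e.g. 80 == ERROR_FILE_EXISTS
        System.err.println("Error code: " + nioe.getErrorCode());
      } else {
        // POSIX errno, e.g. EEXIST
        System.err.println("Errno: " + nioe.getErrno());
      }
    }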

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java

@@ -60,7 +60,7 @@ class NativeCrc32 {
         fileName, basePos);
   }
   
-  private static native void nativeVerifyChunkedSums(
+    private static native void nativeVerifyChunkedSums(
       int bytesPerSum, int checksumType,
       ByteBuffer sums, int sumsOffset,
       ByteBuffer data, int dataOffset, int dataLength,

+ 4 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java

@@ -32,9 +32,10 @@ public class PlatformName {
    * The complete platform 'name' to identify the platform as 
    * per the java-vm.
    */
-  private static final String platformName = System.getProperty("os.name") + "-" + 
-    System.getProperty("os.arch") + "-" +
-    System.getProperty("sun.arch.data.model");
+  private static final String platformName =
+      (Shell.WINDOWS ? System.getenv("os") : System.getProperty("os.name"))
+      + "-" + System.getProperty("os.arch")
+      + "-" + System.getProperty("sun.arch.data.model");
   
   
   /**
   /**
    * Get the complete platform as per the java-vm.
    * Get the complete platform as per the java-vm.
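
For illustration (not part of the patch): a sketch of what the new computation yields; the printed value is an example and depends on the local JVM and OS.

    // Mirrors the new platformName expression; on Windows the %os% environment
    // variable (typically "Windows_NT") replaces the os.name property.
    String name =
        (Shell.WINDOWS ? System.getenv("os") : System.getProperty("os.name"))
        + "-" + System.getProperty("os.arch")
        + "-" + System.getProperty("sun.arch.data.model");
    System.out.println(name);  // e.g. "Windows_NT-amd64-64" or "Linux-amd64-64"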

+ 32 - 0
hadoop-common-project/hadoop-common/src/main/native/native.sln

@@ -0,0 +1,32 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "native", "native.vcxproj", "{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Mixed Platforms = Debug|Mixed Platforms
+		Debug|Win32 = Debug|Win32
+		Debug|x64 = Debug|x64
+		Release|Mixed Platforms = Release|Mixed Platforms
+		Release|Win32 = Release|Win32
+		Release|x64 = Release|x64
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed Platforms.ActiveCfg = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed Platforms.Build.0 = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.ActiveCfg = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.Build.0 = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed Platforms.ActiveCfg = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed Platforms.Build.0 = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.ActiveCfg = Release|x64
+		{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.Build.0 = Release|x64
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal

+ 72 - 0
hadoop-common-project/hadoop-common/src/main/native/native.vcxproj

@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup Label="ProjectConfigurations">
+    <ProjectConfiguration Include="Release|x64">
+      <Configuration>Release</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+  </ItemGroup>
+  <PropertyGroup Label="Globals">
+    <ProjectGuid>{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}</ProjectGuid>
+    <Keyword>Win32Proj</Keyword>
+    <RootNamespace>native</RootNamespace>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
+    <ConfigurationType>DynamicLibrary</ConfigurationType>
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <PropertyGroup Label="UserMacros" />
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <LinkIncremental>false</LinkIncremental>
+    <OutDir>..\..\..\target\bin\</OutDir>
+    <IntDir>..\..\..\target\native\$(Configuration)\</IntDir>
+    <TargetName>hadoop</TargetName>
+  </PropertyGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <PrecompiledHeader>NotUsing</PrecompiledHeader>
+      <Optimization>MaxSpeed</Optimization>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <IntrinsicFunctions>true</IntrinsicFunctions>
+      <PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;NATIVE_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <AdditionalIncludeDirectories>..\winutils\include;..\..\..\target\native\javah;%JAVA_HOME%\include;%JAVA_HOME%\include\win32;.\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <CompileAs>CompileAsC</CompileAs>
+    </ClCompile>
+    <Link>
+      <SubSystem>Windows</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <OptimizeReferences>true</OptimizeReferences>
+      <AdditionalDependencies>Ws2_32.lib;libwinutils.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>..\..\..\target\bin;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemGroup>
+    <ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c" />
+    <ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c" />
+    <ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c" />
+    <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c" />
+  </ItemGroup>
+  <ItemGroup>
+    <ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h" />
+    <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
+    <ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h" />
+    <ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h" />
+    <ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h" />
+    <ClInclude Include="src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
+    <ClInclude Include="src\org_apache_hadoop.h" />
+  </ItemGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+  <ImportGroup Label="ExtensionTargets">
+  </ImportGroup>
+</Project>

+ 54 - 0
hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters

@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup>
+    <Filter Include="Source Files">
+      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
+      <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
+    </Filter>
+    <Filter Include="Header Files">
+      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
+      <Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
+    </Filter>
+    <Filter Include="Resource Files">
+      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
+      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
+    </Filter>
+  </ItemGroup>
+  <ItemGroup>
+    <ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+  </ItemGroup>
+  <ItemGroup>
+    <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
+      <Filter>Source Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h">
+      <Filter>Source Files</Filter>
+    </ClInclude>
+    <ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h">
+      <Filter>Source Files</Filter>
+    </ClInclude>
+    <ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="src\org_apache_hadoop.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+  </ItemGroup>
+</Project>

+ 11 - 3
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c

@@ -16,12 +16,18 @@
  * limitations under the License.
  */
 
-#include <dlfcn.h>
+
+#if defined HADOOP_SNAPPY_LIBRARY
+
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 
+#ifdef UNIX
+#include <dlfcn.h>
 #include "config.h"
 #include "config.h"
+#endif // UNIX
+
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
 
 
@@ -81,7 +87,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
 
 
   if (uncompressed_bytes == 0) {
   if (uncompressed_bytes == 0) {
-    return 0;
+    return (jint)0;
   }
 
   // Get the output direct buffer
@@ -90,7 +96,7 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
 
 
   if (compressed_bytes == 0) {
   if (compressed_bytes == 0) {
-    return 0;
+    return (jint)0;
   }
 
   /* size_t should always be 4 bytes or larger. */
@@ -109,3 +115,5 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompresso
   (*env)->SetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen, 0);
   return (jint)buf_len;
 }
+
+#endif // defined HADOOP_SNAPPY_LIBRARY

+ 9 - 1
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c

@@ -16,12 +16,18 @@
  * limitations under the License.
  */
 
-#include <dlfcn.h>
+
+#if defined HADOOP_SNAPPY_LIBRARY
+
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 
+#ifdef UNIX
 #include "config.h"
 #include "config.h"
+#include <dlfcn.h>
+#endif
+
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
 
 
@@ -103,3 +109,5 @@ JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompres
 
   return (jint)uncompressed_direct_buf_len;
 }
+
+#endif // defined HADOOP_SNAPPY_LIBRARY

+ 139 - 48
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c

@@ -16,12 +16,15 @@
  * limitations under the License.
  */
 
-#include <dlfcn.h>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 
+#ifdef UNIX
+#include <dlfcn.h>
 #include "config.h"
 #include "config.h"
+#endif
+
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
 
 
@@ -35,48 +38,124 @@ static jfieldID ZlibCompressor_directBufferSize;
 static jfieldID ZlibCompressor_finish;
 static jfieldID ZlibCompressor_finished;
 
+#ifdef UNIX
 static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
 static int (*dlsym_deflate)(z_streamp, int);
 static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
 static int (*dlsym_deflateReset)(z_streamp);
 static int (*dlsym_deflateEnd)(z_streamp);
+#endif
+
+#ifdef WINDOWS
+#include <Strsafe.h>
+typedef int (__cdecl *__dlsym_deflateInit2_) (z_streamp, int, int, int, int, int, const char *, int);
+typedef int (__cdecl *__dlsym_deflate) (z_streamp, int);
+typedef int (__cdecl *__dlsym_deflateSetDictionary) (z_streamp, const Bytef *, uInt);
+typedef int (__cdecl *__dlsym_deflateReset) (z_streamp);
+typedef int (__cdecl *__dlsym_deflateEnd) (z_streamp);
+static __dlsym_deflateInit2_ dlsym_deflateInit2_;
+static __dlsym_deflate dlsym_deflate;
+static __dlsym_deflateSetDictionary dlsym_deflateSetDictionary;
+static __dlsym_deflateReset dlsym_deflateReset;
+static __dlsym_deflateEnd dlsym_deflateEnd;
+
+// Try to load zlib.dll from the dir where hadoop.dll is located.
+HANDLE LoadZlibTryHadoopNativeDir() {
+  HMODULE libz = NULL;
+  PCWSTR HADOOP_DLL = L"hadoop.dll";
+  size_t HADOOP_DLL_LEN = 10;
+  WCHAR path[MAX_PATH] = { 0 };
+  BOOL isPathValid = FALSE;
+
+  // Get hadoop.dll full path
+  HMODULE hModule = GetModuleHandle(HADOOP_DLL);
+  if (hModule != NULL) {
+    if (GetModuleFileName(hModule, path, MAX_PATH) > 0) {
+      size_t size = 0;
+      if (StringCchLength(path, MAX_PATH, &size) == S_OK) {
+
+        // Update path variable to have the full path to the zlib.dll.
+        // size_t is unsigned, so guard the subtraction to avoid underflow.
+        if (size > HADOOP_DLL_LEN) {
+          size = size - HADOOP_DLL_LEN;
+          path[size] = L'\0';
+          if (StringCchCat(path, MAX_PATH, HADOOP_ZLIB_LIBRARY) == S_OK) {
+            isPathValid = TRUE;
+          }
+        }
+      }
+    }
+  }
+
+  if (isPathValid) {
+    libz = LoadLibrary(path);
+  }
+
+  // fallback to system paths
+  if (!libz) {
+    libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
+  }
+
+  return libz;
+}
+#endif
 
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
 	JNIEnv *env, jclass class
 	) {
+#ifdef UNIX
 	// Load libz.so
 	void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
-	if (!libz) {
+  if (!libz) {
 		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
 	  	return;
 	}
+#endif
+
+#ifdef WINDOWS
+  HMODULE libz = LoadZlibTryHadoopNativeDir();
 
+  if (!libz) {
+		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
+    return;
+	}
+#endif
+
+#ifdef UNIX
 	// Locate the requisite symbols from libz.so
 	dlerror();                                 // Clear any existing error
-	LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
-	LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
-	LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
-	LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
-	LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+  LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+  LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
+  LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+  LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+  LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+#endif
+
+#ifdef WINDOWS
+  LOAD_DYNAMIC_SYMBOL(__dlsym_deflateInit2_, dlsym_deflateInit2_, env, libz, "deflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_deflate, dlsym_deflate, env, libz, "deflate");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_deflateSetDictionary, dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_deflateReset, dlsym_deflateReset, env, libz, "deflateReset");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_deflateEnd, dlsym_deflateEnd, env, libz, "deflateEnd");
+#endif
 
 	// Initialize the requisite fieldIds
-    ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz", 
+    ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
                                                       "Ljava/lang/Class;");
     ZlibCompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
     ZlibCompressor_finish = (*env)->GetFieldID(env, class, "finish", "Z");
     ZlibCompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
-    ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class, 
-    									"uncompressedDirectBuf", 
+    ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
+        "uncompressedDirectBuf",
     									"Ljava/nio/Buffer;");
     									"Ljava/nio/Buffer;");
-    ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class, 
+    ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class,
     										"uncompressedDirectBufOff", "I");
     										"uncompressedDirectBufOff", "I");
-    ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class, 
+    ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class,
     										"uncompressedDirectBufLen", "I");
     										"uncompressedDirectBufLen", "I");
-    ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class, 
-    									"compressedDirectBuf", 
+    ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
+                      "compressedDirectBuf",
     									"Ljava/nio/Buffer;");
     									"Ljava/nio/Buffer;");
-    ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class, 
+    ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class,
     										"directBufferSize", "I");
     										"directBufferSize", "I");
 }
 }
 
 
@@ -84,7 +163,9 @@ JNIEXPORT jlong JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
 	JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
 	) {
-	// Create a z_stream
+    int rv = 0;
+    static const int memLevel = 8; 							// See zconf.h
+	  // Create a z_stream
     z_stream *stream = malloc(sizeof(z_stream));
     if (!stream) {
 		THROW(env, "java/lang/OutOfMemoryError", NULL);
@@ -93,17 +174,16 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
     memset((void*)stream, 0, sizeof(z_stream));
 
 	// Initialize stream
-	static const int memLevel = 8; 							// See zconf.h
-    int rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
+    rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
     			memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
-    			
+
     if (rv != Z_OK) {
 	    // Contingency - Report error by throwing appropriate exceptions
 	    free(stream);
 	    stream = NULL;
-	
+
 		switch (rv) {
-			case Z_MEM_ERROR: 
+			case Z_MEM_ERROR:
 			    {
 		    		THROW(env, "java/lang/OutOfMemoryError", NULL);
 			    }
@@ -120,27 +200,28 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
 		    break;
 	    }
 	}
-	
+
     return JLONG(stream);
 }
 
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_setDictionary(
-	JNIEnv *env, jclass class, jlong stream, 
+	JNIEnv *env, jclass class, jlong stream,
 	jarray b, jint off, jint len
 	) {
+    int rv = 0;
     Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
     if (!buf) {
         return;
     }
-    int rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
+    rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
     (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-    
+
     if (rv != Z_OK) {
     	// Contingency - Report error by throwing appropriate exceptions
 	    switch (rv) {
 		    case Z_STREAM_ERROR:
-			{	
+			{
 		    	THROW(env, "java/lang/IllegalArgumentException", NULL);
 		    	THROW(env, "java/lang/IllegalArgumentException", NULL);
 			}
 			}
 			break;
 			break;
@@ -157,75 +238,85 @@ JNIEXPORT jint JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
 	JNIEnv *env, jobject this
 	) {
+    jobject clazz = NULL;
+    jobject uncompressed_direct_buf = NULL;
+    jint uncompressed_direct_buf_off = 0;
+    jint uncompressed_direct_buf_len = 0;
+    jobject compressed_direct_buf = NULL;
+    jint compressed_direct_buf_len = 0;
+    jboolean finish;
+    Bytef* uncompressed_bytes = NULL;
+    Bytef* compressed_bytes = NULL;
+    int rv = 0;
+    jint no_compressed_bytes = 0;
 	// Get members of ZlibCompressor
     z_stream *stream = ZSTREAM(
-    						(*env)->GetLongField(env, this, 
+                (*env)->GetLongField(env, this,
     									ZlibCompressor_stream)
     					);
     if (!stream) {
 		THROW(env, "java/lang/NullPointerException", NULL);
 		return (jint)0;
-    } 
+    }
 
     // Get members of ZlibCompressor
-    jobject clazz = (*env)->GetStaticObjectField(env, this, 
+    clazz = (*env)->GetStaticObjectField(env, this,
                                                  ZlibCompressor_clazz);
-	jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this, 
+	uncompressed_direct_buf = (*env)->GetObjectField(env, this,
 									ZlibCompressor_uncompressedDirectBuf);
-	jint uncompressed_direct_buf_off = (*env)->GetIntField(env, this, 
+	uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
 									ZlibCompressor_uncompressedDirectBufOff);
-	jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
 									ZlibCompressor_uncompressedDirectBufLen);
 
-	jobject compressed_direct_buf = (*env)->GetObjectField(env, this, 
+	compressed_direct_buf = (*env)->GetObjectField(env, this,
 									ZlibCompressor_compressedDirectBuf);
-	jint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	compressed_direct_buf_len = (*env)->GetIntField(env, this,
 									ZlibCompressor_directBufferSize);
 
-	jboolean finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
+	finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
 
     // Get the input direct buffer
     LOCK_CLASS(env, clazz, "ZlibCompressor");
-	Bytef* uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+    uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
 											uncompressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibCompressor");
-    
+
   	if (uncompressed_bytes == 0) {
     	return (jint)0;
 	}
-	
+
     // Get the output direct buffer
     LOCK_CLASS(env, clazz, "ZlibCompressor");
-	Bytef* compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+    compressed_bytes = (*env)->GetDirectBufferAddress(env,
 										compressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibCompressor");
 
   	if (compressed_bytes == 0) {
 		return (jint)0;
 	}
-	
+
 	// Re-calibrate the z_stream
   	stream->next_in = uncompressed_bytes + uncompressed_direct_buf_off;
   	stream->next_out = compressed_bytes;
   	stream->avail_in = uncompressed_direct_buf_len;
-	stream->avail_out = compressed_direct_buf_len;
-	
+    stream->avail_out = compressed_direct_buf_len;
+
 	// Compress
-	int rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
+	rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
 
-	jint no_compressed_bytes = 0;
 	switch (rv) {
     	// Contingency? - Report error by throwing appropriate exceptions
   		case Z_STREAM_END:
   		{
   			(*env)->SetBooleanField(env, this, ZlibCompressor_finished, JNI_TRUE);
   		} // cascade
-	  	case Z_OK: 
+      case Z_OK:
 	  	{
 	  		uncompressed_direct_buf_off += uncompressed_direct_buf_len - stream->avail_in;
-			(*env)->SetIntField(env, this, 
+			(*env)->SetIntField(env, this,
 						ZlibCompressor_uncompressedDirectBufOff, uncompressed_direct_buf_off);
-			(*env)->SetIntField(env, this, 
+			(*env)->SetIntField(env, this,
 						ZlibCompressor_uncompressedDirectBufLen, stream->avail_in);
 			no_compressed_bytes = compressed_direct_buf_len - stream->avail_out;
 	  	}
@@ -238,7 +329,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
 		}
 		break;
   	}
-  	
+
   	return no_compressed_bytes;
 }
 

+ 96 - 43
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c

@@ -16,12 +16,15 @@
  * limitations under the License.
  */
 
-#include <dlfcn.h>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 
+#ifdef UNIX
+#include <dlfcn.h>
 #include "config.h"
 #include "config.h"
+#endif
+
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
 
 
@@ -35,48 +38,88 @@ static jfieldID ZlibDecompressor_directBufferSize;
 static jfieldID ZlibDecompressor_needDict;
 static jfieldID ZlibDecompressor_finished;
 
+#ifdef UNIX
 static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
 static int (*dlsym_inflate)(z_streamp, int);
 static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
 static int (*dlsym_inflateReset)(z_streamp);
 static int (*dlsym_inflateEnd)(z_streamp);
+#endif
+
+#ifdef WINDOWS
+#include <Strsafe.h>
+typedef int (__cdecl *__dlsym_inflateInit2_)(z_streamp, int, const char *, int);
+typedef int (__cdecl *__dlsym_inflate)(z_streamp, int);
+typedef int (__cdecl *__dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
+typedef int (__cdecl *__dlsym_inflateReset)(z_streamp);
+typedef int (__cdecl *__dlsym_inflateEnd)(z_streamp);
+static __dlsym_inflateInit2_ dlsym_inflateInit2_;
+static __dlsym_inflate dlsym_inflate;
+static __dlsym_inflateSetDictionary dlsym_inflateSetDictionary;
+static __dlsym_inflateReset dlsym_inflateReset;
+static __dlsym_inflateEnd dlsym_inflateEnd;
+extern HANDLE LoadZlibTryHadoopNativeDir();
+#endif
 
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
-	JNIEnv *env, jclass class
+JNIEnv *env, jclass class
 	) {
 	// Load libz.so
-    void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+#ifdef UNIX
+  void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
 	if (!libz) {
 	  THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
 	  return;
-	} 
+	}
+#endif
+
+#ifdef WINDOWS
+  HMODULE libz = LoadZlibTryHadoopNativeDir();
+
+	if (!libz) {
+	  THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
+	  return;
+	}
+#endif
+
 
 	// Locate the requisite symbols from libz.so
+#ifdef UNIX
 	dlerror();                                 // Clear any existing error
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
+#endif
+
+#ifdef WINDOWS
+	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateInit2_, dlsym_inflateInit2_, env, libz, "inflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_inflate, dlsym_inflate, env, libz, "inflate");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateSetDictionary, dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateReset, dlsym_inflateReset, env, libz, "inflateReset");
+	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateEnd, dlsym_inflateEnd, env, libz, "inflateEnd");
+#endif
+
 
-	// Initialize the requisite fieldIds
-    ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz", 
+  // Initialize the requisite fieldIds
+    ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
                                                       "Ljava/lang/Class;");
                                                       "Ljava/lang/Class;");
     ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
     ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
     ZlibDecompressor_needDict = (*env)->GetFieldID(env, class, "needDict", "Z");
     ZlibDecompressor_needDict = (*env)->GetFieldID(env, class, "needDict", "Z");
     ZlibDecompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
     ZlibDecompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
-    ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class, 
-    											"compressedDirectBuf", 
+    ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
+                          "compressedDirectBuf",
     											"Ljava/nio/Buffer;");
     											"Ljava/nio/Buffer;");
-    ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class, 
+    ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class,
     										"compressedDirectBufOff", "I");
     										"compressedDirectBufOff", "I");
-    ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class, 
+    ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class,
     										"compressedDirectBufLen", "I");
     										"compressedDirectBufLen", "I");
-    ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class, 
-    											"uncompressedDirectBuf", 
+    ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
+                          "uncompressedDirectBuf",
     											"Ljava/nio/Buffer;");
     											"Ljava/nio/Buffer;");
-    ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class, 
+    ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class,
     											"directBufferSize", "I");
     											"directBufferSize", "I");
 }
 }
 
 
@@ -84,21 +127,22 @@ JNIEXPORT jlong JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
 	JNIEnv *env, jclass cls, jint windowBits
 	) {
+    int rv = 0;
     z_stream *stream = malloc(sizeof(z_stream));
     memset((void*)stream, 0, sizeof(z_stream));
 
     if (stream == 0) {
 		THROW(env, "java/lang/OutOfMemoryError", NULL);
 		return (jlong)0;
-    } 
-    
-    int rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
+    }
+
+    rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
 
 	if (rv != Z_OK) {
 	    // Contingency - Report error by throwing appropriate exceptions
 		free(stream);
 		stream = NULL;
-		
+
 		switch (rv) {
 		 	case Z_MEM_ERROR:
 		 	{
@@ -112,7 +156,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
 	  		break;
 		}
 	}
-	
+
 	return JLONG(stream);
 }
 
@@ -121,21 +165,22 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_setDictionary(
 	JNIEnv *env, jclass cls, jlong stream,
 	jarray b, jint off, jint len
 	) {
+    int rv = 0;
     Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
     if (!buf) {
 		THROW(env, "java/lang/InternalError", NULL);
         return;
     }
-    int rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
+    rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
     (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-    
+
     if (rv != Z_OK) {
 	    // Contingency - Report error by throwing appropriate exceptions
 		switch (rv) {
 		    case Z_STREAM_ERROR:
 	    	case Z_DATA_ERROR:
 			{
-				THROW(env, "java/lang/IllegalArgumentException", 
+				THROW(env, "java/lang/IllegalArgumentException",
 					(ZSTREAM(stream))->msg);
 			}
 			break;
@@ -152,62 +197,71 @@ JNIEXPORT jint JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
 	JNIEnv *env, jobject this
 	) {
+    jobject clazz = NULL;
+    jarray compressed_direct_buf = NULL;
+    jint compressed_direct_buf_off = 0;
+    jint compressed_direct_buf_len = 0;
+    jarray uncompressed_direct_buf = NULL;
+    jint uncompressed_direct_buf_len = 0;
+    Bytef *compressed_bytes = NULL;
+    Bytef *uncompressed_bytes = NULL;
+    int rv = 0;
+    int no_decompressed_bytes = 0;
 	// Get members of ZlibDecompressor
     z_stream *stream = ZSTREAM(
-    						(*env)->GetLongField(env, this, 
+                (*env)->GetLongField(env, this,
     									ZlibDecompressor_stream)
     					);
     if (!stream) {
 		THROW(env, "java/lang/NullPointerException", NULL);
 		return (jint)0;
-    } 
+    }
 
     // Get members of ZlibDecompressor
-    jobject clazz = (*env)->GetStaticObjectField(env, this, 
+    clazz = (*env)->GetStaticObjectField(env, this,
                                                  ZlibDecompressor_clazz);
-	jarray compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
+	compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
 											ZlibDecompressor_compressedDirectBuf);
-	jint compressed_direct_buf_off = (*env)->GetIntField(env, this, 
+	compressed_direct_buf_off = (*env)->GetIntField(env, this,
 									ZlibDecompressor_compressedDirectBufOff);
-	jint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	compressed_direct_buf_len = (*env)->GetIntField(env, this,
 									ZlibDecompressor_compressedDirectBufLen);
 
-	jarray uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
+	uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
 											ZlibDecompressor_uncompressedDirectBuf);
-	jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
 										ZlibDecompressor_directBufferSize);
 
     // Get the input direct buffer
     LOCK_CLASS(env, clazz, "ZlibDecompressor");
-	Bytef *compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+	compressed_bytes = (*env)->GetDirectBufferAddress(env,
 										compressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
-    
+
 	if (!compressed_bytes) {
 	    return (jint)0;
 	}
-	
+
     // Get the output direct buffer
     LOCK_CLASS(env, clazz, "ZlibDecompressor");
-	Bytef *uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+	uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
 											uncompressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
 
 	if (!uncompressed_bytes) {
 	    return (jint)0;
 	}
-	
+
 	// Re-calibrate the z_stream
 	stream->next_in  = compressed_bytes + compressed_direct_buf_off;
 	stream->next_out = uncompressed_bytes;
 	stream->avail_in  = compressed_direct_buf_len;
 	stream->avail_out = uncompressed_direct_buf_len;
-	
+
 	// Decompress
-	int rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
+	rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
 
 	// Contingency? - Report error by throwing appropriate exceptions
-	int no_decompressed_bytes = 0;	
 	switch (rv) {
 		case Z_STREAM_END:
 		{
@@ -216,9 +270,9 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
 		case Z_OK:
 		{
 		    compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
-		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff, 
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
 		    			compressed_direct_buf_off);
-		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen, 
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
 		    			stream->avail_in);
 		    no_decompressed_bytes = uncompressed_direct_buf_len - stream->avail_out;
 		}
@@ -227,9 +281,9 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
 		{
 		{
 		    (*env)->SetBooleanField(env, this, ZlibDecompressor_needDict, JNI_TRUE);
 		    (*env)->SetBooleanField(env, this, ZlibDecompressor_needDict, JNI_TRUE);
 		    compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
 		    compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
-		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff, 
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
 		    			compressed_direct_buf_off);
 		    			compressed_direct_buf_off);
-		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen, 
+		    (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
 		    			stream->avail_in);
 		    			stream->avail_in);
 		}
 		}
 		break;
 		break;
@@ -251,7 +305,7 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
 		}
 		}
 		break;
 		break;
     }
     }
-    
+
     return no_decompressed_bytes;
     return no_decompressed_bytes;
 }
 }
 
 
@@ -299,4 +353,3 @@ Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_end(
 /**
 /**
  * vim: sw=2: ts=2: et:
  * vim: sw=2: ts=2: et:
  */
  */
-
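The pattern running through this hunk — turning declarations-with-initializers such as `jint rv = ...` into plain assignments, with the variables declared at the top of the function — is what lets these sources build with the Microsoft C compiler, which compiles C as C89 and rejects declarations that follow a statement. A minimal sketch of the transformation (the helper names here are illustrative, not from the patch):

/* Hypothetical helpers, for illustration only. */
int do_work(int handle);
int query(int handle);

/* Before: fine for GCC (C99), an error for MSVC in C mode, because
 * the declaration of rv follows a statement. */
int before(int handle) {
  do_work(handle);
  int rv = query(handle);
  return rv;
}

/* After: every declaration hoisted to the top of the block, exactly
 * as rv and the buffer variables were hoisted in the hunk above. */
int after(int handle) {
  int rv;
  do_work(handle);
  rv = query(handle);
  return rv;
}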

+ 14 - 5
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h

@@ -19,14 +19,23 @@
 #if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
 #define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
 
-#include <dlfcn.h>
-#include <jni.h>
+#include "org_apache_hadoop.h"
+
+#ifdef UNIX
+#include <config.h>
 #include <stddef.h>
-#include <zconf.h>
 #include <zlib.h>
+#include <zconf.h>
+#include <dlfcn.h>
+#include <jni.h>
+#endif
 
-#include "config.h"
-#include "org_apache_hadoop.h"
+#ifdef WINDOWS
+#include <jni.h>
+#define HADOOP_ZLIB_LIBRARY L"zlib1.dll"
+#include <zlib.h>
+#include <zconf.h>
+#endif
 
 /* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
 #define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
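The ZSTREAM helper above is how a z_stream allocated in native code is handed to Java as an opaque long and recovered later. A minimal sketch of that round trip (the function names are illustrative, and zlib is called directly here rather than through the dlsym'ed pointers the real code uses):

#include <jni.h>
#include <stdlib.h>
#include <stddef.h>
#include <zlib.h>

#define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
#define JLONG(stream)   ((jlong)((ptrdiff_t)(stream)))

/* Allocate a zeroed z_stream and return its address as a Java long. */
jlong example_init(JNIEnv *env, jclass clazz) {
  z_stream *stream = calloc(1, sizeof(z_stream));
  if (stream == NULL) return 0; /* real code would throw OutOfMemoryError */
  return JLONG(stream);
}

/* A later native call converts the handle back into a usable pointer. */
void example_reset(JNIEnv *env, jclass clazz, jlong handle) {
  z_stream *stream = ZSTREAM(handle);
  inflateReset(stream);
}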

+ 374 - 25
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c

@@ -18,6 +18,10 @@
 
 #define _GNU_SOURCE
 
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+
+#ifdef UNIX
 #include <assert.h>
 #include <errno.h>
 #include <fcntl.h>
@@ -30,14 +34,19 @@
 #include <sys/types.h>
 #include <sys/syscall.h>
 #include <unistd.h>
-
 #include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+#endif
+
+#ifdef WINDOWS
+#include <assert.h>
+#include <Windows.h>
+#include "winutils.h"
+#endif
+
 #include "file_descriptor.h"
 #include "errno_enum.h"
 
-// the NativeIO$Stat inner class and its constructor
+// the NativeIO$POSIX$Stat inner class and its constructor
 static jclass stat_clazz;
 static jmethodID stat_ctor;
 
@@ -52,26 +61,32 @@ static jobject pw_lock_object;
 
 // Internal functions
 static void throw_ioe(JNIEnv* env, int errnum);
+#ifdef UNIX
 static ssize_t get_pw_buflen();
+#endif
 
 /**
  * Returns non-zero if the user has specified that the system
 * has non-threadsafe implementations of getpwuid_r or getgrgid_r.
 **/
 static int workaround_non_threadsafe_calls(JNIEnv *env, jclass clazz) {
-  jfieldID needs_workaround_field = (*env)->GetStaticFieldID(env, clazz,
-    "workaroundNonThreadSafePasswdCalls", "Z");
+  jboolean result;
+  jfieldID needs_workaround_field = (*env)->GetStaticFieldID(
+    env, clazz,
+    "workaroundNonThreadSafePasswdCalls",
+    "Z");
   PASS_EXCEPTIONS_RET(env, 0);
   assert(needs_workaround_field);
 
-  jboolean result = (*env)->GetStaticBooleanField(
+  result = (*env)->GetStaticBooleanField(
     env, clazz, needs_workaround_field);
   return result;
 }
 
+#ifdef UNIX
 static void stat_init(JNIEnv *env, jclass nativeio_class) {
   // Init Stat
-  jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
+  jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat");
   if (!clazz) {
     return; // exception has been raised
   }
@@ -84,6 +99,7 @@ static void stat_init(JNIEnv *env, jclass nativeio_class) {
   if (!stat_ctor) {
     return; // exception has been raised
   }
+
   jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
   if (!obj_class) {
     return; // exception has been raised
@@ -98,6 +114,7 @@ static void stat_init(JNIEnv *env, jclass nativeio_class) {
     pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
     PASS_EXCEPTIONS(env);
     pw_lock_object = (*env)->NewGlobalRef(env, pw_lock_object);
+
     PASS_EXCEPTIONS(env);
   }
 }
@@ -112,6 +129,7 @@ static void stat_deinit(JNIEnv *env) {
     pw_lock_object = NULL;
   }
 }
+#endif
 
 static void nioe_init(JNIEnv *env) {
   // Init NativeIOException
@@ -120,8 +138,15 @@ static void nioe_init(JNIEnv *env) {
   PASS_EXCEPTIONS(env);
 
   nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
+#ifdef UNIX
   nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
     "(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
+#endif
+
+#ifdef WINDOWS
+  nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
+    "(Ljava/lang/String;I)V");
+#endif
 }
 
 static void nioe_deinit(JNIEnv *env) {
@@ -142,32 +167,46 @@ static void nioe_deinit(JNIEnv *env) {
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
 	JNIEnv *env, jclass clazz) {
-
+#ifdef UNIX
   stat_init(env, clazz);
   PASS_EXCEPTIONS_GOTO(env, error);
+#endif
   nioe_init(env);
   PASS_EXCEPTIONS_GOTO(env, error);
   fd_init(env);
   PASS_EXCEPTIONS_GOTO(env, error);
+#ifdef UNIX
   errno_enum_init(env);
   PASS_EXCEPTIONS_GOTO(env, error);
+#endif
   return;
 error:
   // these are all idempodent and safe to call even if the
   // class wasn't initted yet
+#ifdef UNIX
   stat_deinit(env);
+#endif
   nioe_deinit(env);
   fd_deinit(env);
+#ifdef UNIX
   errno_enum_deinit(env);
+#endif
 }
 
 /*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method:    fstat
+ * Signature: (Ljava/io/FileDescriptor;)Lorg/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat;
  * public static native Stat fstat(FileDescriptor fd);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
  */
 JNIEXPORT jobject JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_fstat(
   JNIEnv *env, jclass clazz, jobject fd_object)
 {
+#ifdef UNIX
   jobject ret = NULL;
 
   int fd = fd_get(env, fd_object);
@@ -186,14 +225,26 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
 
 cleanup:
   return ret;
+#endif
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+    "The function POSIX.fstat() is not supported on Windows");
+  return NULL;
+#endif
 }
 
+
+
 /**
  * public static native void posix_fadvise(
  *   FileDescriptor fd, long offset, long len, int flags);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
  */
 JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_posix_1fadvise(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_posix_1fadvise(
   JNIEnv *env, jclass clazz,
   jobject fd_object, jlong offset, jlong len, jint flags)
 {
@@ -239,9 +290,12 @@ static int manual_sync_file_range (int fd, __off64_t from, __off64_t to, unsigne
 /**
  * public static native void sync_file_range(
  *   FileDescriptor fd, long offset, long len, int flags);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
  */
 JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_sync_1file_1range(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_sync_1file_1range(
   JNIEnv *env, jclass clazz,
   jobject fd_object, jlong offset, jlong len, jint flags)
 {
@@ -283,13 +337,20 @@ static int toFreeBSDFlags(int flags)
 #endif
 
 /*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method:    open
+ * Signature: (Ljava/lang/String;II)Ljava/io/FileDescriptor;
  * public static native FileDescriptor open(String path, int flags, int mode);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
  */
 JNIEXPORT jobject JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_open(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_open(
   JNIEnv *env, jclass clazz, jstring j_path,
   jint flags, jint mode)
 {
+#ifdef UNIX
 #ifdef __FreeBSD__
   flags = toFreeBSDFlags(flags);
 #endif
@@ -317,16 +378,90 @@ cleanup:
     (*env)->ReleaseStringUTFChars(env, j_path, path);
   }
   return ret;
+#endif
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+    "The function POSIX.open() is not supported on Windows");
+  return NULL;
+#endif
 }
 
-/**
- * public static native void chmod(String path, int mode) throws IOException;
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method:    createFile
+ * Signature: (Ljava/lang/String;JJJ)Ljava/io/FileDescriptor;
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
  */
-JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
-  JNIEnv *env, jclass clazz, jstring j_path,
-  jint mode)
+JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFile
+  (JNIEnv *env, jclass clazz, jstring j_path,
+  jlong desiredAccess, jlong shareMode, jlong creationDisposition)
+{
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function Windows.createFile() is not supported on Unix");
+  return NULL;
+#endif
+
+#ifdef WINDOWS
+  DWORD dwRtnCode = ERROR_SUCCESS;
+  BOOL isSymlink = FALSE;
+  BOOL isJunction = FALSE;
+  DWORD dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS;
+  jobject ret = (jobject) NULL;
+  HANDLE hFile = INVALID_HANDLE_VALUE;
+  WCHAR *path = (WCHAR *) (*env)->GetStringChars(env, j_path, (jboolean*)NULL);
+  if (path == NULL) goto cleanup;
+
+  // Set the flag for a symbolic link or a junctions point only when it exists.
+  // According to MSDN if the call to CreateFile() function creates a file,
+  // there is no change in behavior. So we do not throw if no file is found.
+  //
+  dwRtnCode = SymbolicLinkCheck(path, &isSymlink);
+  if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
+    throw_ioe(env, dwRtnCode);
+    goto cleanup;
+  }
+  dwRtnCode = JunctionPointCheck(path, &isJunction);
+  if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
+    throw_ioe(env, dwRtnCode);
+    goto cleanup;
+  }
+  if (isSymlink || isJunction)
+    dwFlagsAndAttributes |= FILE_FLAG_OPEN_REPARSE_POINT;
+
+  hFile = CreateFile(path,
+    (DWORD) desiredAccess,
+    (DWORD) shareMode,
+    (LPSECURITY_ATTRIBUTES ) NULL,
+    (DWORD) creationDisposition,
+    dwFlagsAndAttributes,
+    NULL);
+  if (hFile == INVALID_HANDLE_VALUE) {
+    throw_ioe(env, GetLastError());
+    goto cleanup;
+  }
+
+  ret = fd_create(env, (long) hFile);
+cleanup:
+  if (path != NULL) {
+    (*env)->ReleaseStringChars(env, j_path, (const jchar*)path);
+  }
+  return (jobject) ret;
+#endif
+}
+
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method:    chmod
+ * Signature: (Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_chmodImpl
+  (JNIEnv *env, jclass clazz, jstring j_path, jint mode)
 {
+#ifdef UNIX
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
   if (path == NULL) return; // JVM throws Exception for us
 
@@ -335,15 +470,30 @@ Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
   }
 
   (*env)->ReleaseStringUTFChars(env, j_path, path);
+#endif
+
+#ifdef WINDOWS
+  DWORD dwRtnCode = ERROR_SUCCESS;
+  LPCWSTR path = (LPCWSTR) (*env)->GetStringChars(env, j_path, NULL);
+  if (path == NULL) return; // JVM throws Exception for us
+
+  if ((dwRtnCode = ChangeFileModeByMask((LPCWSTR) path, mode)) != ERROR_SUCCESS)
+  {
+    throw_ioe(env, dwRtnCode);
+  }
+
+  (*env)->ReleaseStringChars(env, j_path, (const jchar*) path);
+#endif
 }
 
 /*
  * static native String getUserName(int uid);
  */
 JNIEXPORT jstring JNICALL 
-Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env, 
-jclass clazz, jint uid)
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getUserName(
+  JNIEnv *env, jclass clazz, jint uid)
 {
+#ifdef UNIX
   int pw_lock_locked = 0;
   if (pw_lock_object != NULL) {
     if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
@@ -395,15 +545,26 @@ cleanup:
   }
   if (pw_buf != NULL) free(pw_buf);
   return jstr_username;
+#endif // UNIX
+
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+    "The function POSIX.getUserName() is not supported on Windows");
+  return NULL;
+#endif
 }
 
 /*
  * static native String getGroupName(int gid);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
  */
 JNIEXPORT jstring JNICALL 
-Java_org_apache_hadoop_io_nativeio_NativeIO_getGroupName(JNIEnv *env, 
-jclass clazz, jint gid)
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getGroupName(
+  JNIEnv *env, jclass clazz, jint gid)
 {
+#ifdef UNIX
   int pw_lock_locked = 0;
  
   if (pw_lock_object != NULL) {
@@ -457,14 +618,21 @@ cleanup:
   }
   if (pw_buf != NULL) free(pw_buf);
   return jstr_groupname;
-}
+#endif  //   UNIX
 
 
+#ifdef WINDOWS
+  THROW(env, "java/io/IOException",
+    "The function POSIX.getUserName() is not supported on Windows");
+  return NULL;
+#endif
+}
 
 /*
  * Throw a java.IO.IOException, generating the message from errno.
  */
 static void throw_ioe(JNIEnv* env, int errnum)
 {
+#ifdef UNIX
   char message[80];
   jstring jstr_message;
 
@@ -489,9 +657,51 @@ static void throw_ioe(JNIEnv* env, int errnum)
 err:
   if (jstr_message != NULL)
     (*env)->ReleaseStringUTFChars(env, jstr_message, message);
-}
+#endif
+
+#ifdef WINDOWS
+  DWORD len = 0;
+  LPWSTR buffer = NULL;
+  const jchar* message = NULL;
+  jstring jstr_message = NULL;
+  jthrowable obj = NULL;
+
+  len = FormatMessageW(
+    FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
+    NULL, *(DWORD*) (&errnum), // reinterpret cast
+    MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+    (LPWSTR) &buffer, 0, NULL);
+
+  if (len > 0)
+  {
+    message = (const jchar*) buffer;
+  }
+  else
+  {
+    message = (const jchar*) L"Unknown error.";
+  }
+
+  if ((jstr_message = (*env)->NewString(env, message, len)) == NULL)
+    goto err;
+  LocalFree(buffer);
+  buffer = NULL; // Set buffer to NULL to avoid double free
+
+  obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
+    jstr_message, errnum);
+  if (obj == NULL) goto err;
 
+  (*env)->Throw(env, obj);
+  return;
 
+err:
+  if (jstr_message != NULL)
+    (*env)->ReleaseStringChars(env, jstr_message, message);
+  LocalFree(buffer);
+  return;
+#endif
+}
+
+#ifdef UNIX
 /*
  * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
  */
@@ -502,6 +712,145 @@ ssize_t get_pw_buflen() {
   #endif
   return (ret > 512) ? ret : 512;
 }
+#endif
+
+
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method:    getOwnerOnWindows
+ * Signature: (Ljava/io/FileDescriptor;)Ljava/lang/String;
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jstring JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getOwner
+  (JNIEnv *env, jclass clazz, jobject fd_object)
+{
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function Windows.getOwner() is not supported on Unix");
+  return NULL;
+#endif
+
+#ifdef WINDOWS
+  PSID pSidOwner = NULL;
+  PSECURITY_DESCRIPTOR pSD = NULL;
+  LPWSTR ownerName = (LPWSTR)NULL;
+  DWORD dwRtnCode = ERROR_SUCCESS;
+  jstring jstr_username = NULL;
+  HANDLE hFile = (HANDLE) fd_get(env, fd_object);
+  PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+  dwRtnCode = GetSecurityInfo(
+    hFile,
+    SE_FILE_OBJECT,
+    OWNER_SECURITY_INFORMATION,
+    &pSidOwner,
+    NULL,
+    NULL,
+    NULL,
+    &pSD);
+  if (dwRtnCode != ERROR_SUCCESS) {
+    throw_ioe(env, dwRtnCode);
+    goto cleanup;
+  }
+
+  dwRtnCode = GetAccntNameFromSid(pSidOwner, &ownerName);
+  if (dwRtnCode != ERROR_SUCCESS) {
+    throw_ioe(env, dwRtnCode);
+    goto cleanup;
+  }
+
+  jstr_username = (*env)->NewString(env, ownerName, (jsize) wcslen(ownerName));
+  if (jstr_username == NULL) goto cleanup;
+
+cleanup:
+  LocalFree(ownerName);
+  LocalFree(pSD);
+  return jstr_username;
+#endif
+}
+
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method:    setFilePointer
+ * Signature: (Ljava/io/FileDescriptor;JJ)J
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_setFilePointer
+  (JNIEnv *env, jclass clazz, jobject fd_object, jlong distanceToMove, jlong moveMethod)
+{
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function setFilePointer(FileDescriptor) is not supported on Unix");
+  return NULL;
+#endif
+
+#ifdef WINDOWS
+  DWORD distanceToMoveLow = (DWORD) distanceToMove;
+  LONG distanceToMoveHigh = (LONG) (distanceToMove >> 32);
+  DWORD distanceMovedLow = 0;
+  HANDLE hFile = (HANDLE) fd_get(env, fd_object);
+  PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+  distanceMovedLow = SetFilePointer(hFile,
+    distanceToMoveLow, &distanceToMoveHigh, (DWORD) moveMethod);
+
+  if (distanceMovedLow == INVALID_SET_FILE_POINTER) {
+     throw_ioe(env, GetLastError());
+     return -1;
+  }
+
+cleanup:
+
+  return ((jlong) distanceToMoveHigh << 32) | (jlong) distanceMovedLow;
+#endif
+}
+
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method:    getLengthFollowSymlink
+ * Signature: (Ljava/lang/String;)J
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getLengthFollowSymlink
+  (JNIEnv *env, jclass clazz, jstring j_path)
+{
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function getLengthFollowSymlink(String) is not supported on Unix");
+  return 0;
+#endif
+
+#ifdef WINDOWS
+  DWORD dwRtnCode = ERROR_SUCCESS;
+  BY_HANDLE_FILE_INFORMATION fileInfo = { 0 };
+  LARGE_INTEGER fileSize = { 0 };
+
+  const wchar_t *path = (const wchar_t*) (*env)->GetStringChars(env, j_path, NULL);
+  if (path == NULL) return 0; // JVM throws Exception for us
+
+  dwRtnCode = GetFileInformationByName(path, TRUE, &fileInfo);
+  if (dwRtnCode != ERROR_SUCCESS) {
+    throw_ioe(env, dwRtnCode);
+  }
+
+  (*env)->ReleaseStringChars(env, j_path, path);
+
+  fileSize.HighPart = fileInfo.nFileSizeHigh;
+  fileSize.LowPart = fileInfo.nFileSizeLow;
+
+  return (jlong)(fileSize.QuadPart);
+#endif
+}
+
 /**
  * vim: sw=2: ts=2: et:
  */
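The renamed entry points above follow the JNI mangling rules for nested classes: the native symbol is built from the binary class name, '.' and '/' become '_', an underscore inside a Java name becomes '_1' (hence posix_1fadvise), and any other special character becomes a '_0xxxx' Unicode escape, so the '$' (U+0024) in NativeIO$POSIX becomes '_00024'. Sketched against the fstat method shown above:

#include <jni.h>

/* Java side (for reference):
 *
 *   package org.apache.hadoop.io.nativeio;
 *   public class NativeIO {
 *     public static class POSIX {
 *       public static native Stat fstat(FileDescriptor fd);
 *     }
 *   }
 *
 * Binary class name:  org.apache.hadoop.io.nativeio.NativeIO$POSIX
 * Mangled C symbol:   "Java_" + package and class with '.' -> '_',
 *                     '$' -> "_00024", then '_' + the method name.
 */
JNIEXPORT jobject JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_fstat(
    JNIEnv *env, jclass clazz, jobject fd_object);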

+ 43 - 2
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c

@@ -14,7 +14,7 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
- 
+
 #include <jni.h>
 #include "file_descriptor.h"
 #include "org_apache_hadoop.h"
@@ -26,6 +26,10 @@ static jfieldID fd_descriptor;
 // the no-argument constructor
 static jmethodID fd_constructor;
 
+#ifdef WINDOWS
+// the internal field for the long handle
+static jfieldID fd_handle;
+#endif
 
 void fd_init(JNIEnv* env)
 {
@@ -37,6 +41,12 @@ void fd_init(JNIEnv* env)
 
   fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
   PASS_EXCEPTIONS(env);
+
+#ifdef WINDOWS
+  fd_handle = (*env)->GetFieldID(env, fd_class, "handle", "J");
+  PASS_EXCEPTIONS(env);
+#endif
+
   fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
 }
 
@@ -46,9 +56,13 @@ void fd_deinit(JNIEnv *env) {
     fd_class = NULL;
   }
   fd_descriptor = NULL;
+#ifdef WINDOWS
+  fd_handle = NULL;
+#endif
   fd_constructor = NULL;
 }
 
+#ifdef UNIX
 /*
  * Given an instance 'obj' of java.io.FileDescriptor, return the
  * underlying fd, or throw if unavailable
@@ -71,4 +85,31 @@ jobject fd_create(JNIEnv *env, int fd) {
 
   (*env)->SetIntField(env, obj, fd_descriptor, fd);
   return obj;
-} 
+}
+#endif
+
+#ifdef WINDOWS
+/*
+ * Given an instance 'obj' of java.io.FileDescriptor, return the
+ * underlying fd, or throw if unavailable
+ */
+long fd_get(JNIEnv* env, jobject obj) {
+  if (obj == NULL) {
+    THROW(env, "java/lang/NullPointerException",
+          "FileDescriptor object is null");
+    return -1;
+  }
+  return (long) (*env)->GetLongField(env, obj, fd_handle);
+}
+
+/*
+ * Create a FileDescriptor object corresponding to the given int fd
+ */
+jobject fd_create(JNIEnv *env, long fd) {
+  jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
+  PASS_EXCEPTIONS_RET(env, (jobject) NULL);
+
+  (*env)->SetLongField(env, obj, fd_handle, fd);
+  return obj;
+}
+#endif
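On Windows the FileDescriptor wrapper stores a native HANDLE in java.io.FileDescriptor's long "handle" field instead of the int "fd". A sketch of the round trip a caller performs (the function names here are illustrative, not part of the patch):

#include <Windows.h>
#include <jni.h>
#include "file_descriptor.h"

/* HANDLE -> java.io.FileDescriptor, via the long "handle" field. */
jobject wrap_handle_example(JNIEnv *env, LPCWSTR path) {
  HANDLE h = CreateFileW(path, GENERIC_READ, FILE_SHARE_READ,
                         NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
  if (h == INVALID_HANDLE_VALUE) {
    return NULL; /* real callers raise an IOException via throw_ioe() */
  }
  return fd_create(env, (long) h);
}

/* java.io.FileDescriptor -> HANDLE, for use in later Win32 calls. */
void close_wrapped_handle_example(JNIEnv *env, jobject fd_object) {
  HANDLE h = (HANDLE) fd_get(env, fd_object);
  CloseHandle(h);
}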

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h

@@ -18,11 +18,19 @@
 #define FILE_DESCRIPTOR_H
 
 #include <jni.h>
+#include "org_apache_hadoop.h"
 
 void fd_init(JNIEnv *env);
 void fd_deinit(JNIEnv *env);
 
+#ifdef UNIX
 int fd_get(JNIEnv* env, jobject obj);
 jobject fd_create(JNIEnv *env, int fd);
+#endif
+
+#ifdef WINDOWS
+long fd_get(JNIEnv* env, jobject obj);
+jobject fd_create(JNIEnv *env, long fd);
+#endif
 
 #endif

+ 39 - 15
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c

@@ -16,18 +16,22 @@
  * limitations under the License.
  */
 
-#include <arpa/inet.h>
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_util_NativeCrc32.h"
+
 #include <assert.h>
-#include <inttypes.h>
 #include <stdlib.h>
 #include <stdint.h>
 #include <string.h>
-#include <unistd.h>
 
+#ifdef UNIX
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <unistd.h>
 #include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_util_NativeCrc32.h"
 #include "gcc_optimizations.h"
+#endif // UNIX
+
 #include "bulk_crc32.h"
 
 static void throw_checksum_exception(JNIEnv *env,
@@ -36,6 +40,9 @@ static void throw_checksum_exception(JNIEnv *env,
   char message[1024];
   jstring jstr_message;
   char *filename;
+  jclass checksum_exception_clazz;
+  jmethodID checksum_exception_ctor;
+  jthrowable obj;
 
   // Get filename as C string, or "null" if not provided
   if (j_filename == NULL) {
@@ -50,28 +57,38 @@ static void throw_checksum_exception(JNIEnv *env,
   }
 
   // Format error message
+#ifdef WINDOWS
+  _snprintf_s(
+	message,
+	sizeof(message),
+	_TRUNCATE,
+    "Checksum error: %s at %I64d exp: %d got: %d",
+    filename, pos, expected_crc, got_crc);
+#else
   snprintf(message, sizeof(message),
     "Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
     filename, pos, expected_crc, got_crc);
+#endif // WINDOWS
+
   if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
     goto cleanup;
   }
 
   // Throw exception
-  jclass checksum_exception_clazz = (*env)->FindClass(
+  checksum_exception_clazz = (*env)->FindClass(
     env, "org/apache/hadoop/fs/ChecksumException");
   if (checksum_exception_clazz == NULL) {
     goto cleanup;
   }
 
-  jmethodID checksum_exception_ctor = (*env)->GetMethodID(env,
+  checksum_exception_ctor = (*env)->GetMethodID(env,
     checksum_exception_clazz, "<init>",
     "(Ljava/lang/String;J)V");
   if (checksum_exception_ctor == NULL) {
     goto cleanup;
   }
 
-  jthrowable obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
+  obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
     checksum_exception_ctor, jstr_message, pos);
   if (obj == NULL) goto cleanup;
 
@@ -103,6 +120,14 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunk
     jobject j_data, jint data_offset, jint data_len,
     jstring j_filename, jlong base_pos)
 {
+  uint8_t *sums_addr;
+  uint8_t *data_addr;
+  uint32_t *sums;
+  uint8_t *data;
+  int crc_type;
+  crc32_error_t error_data;
+  int ret;
+
   if (unlikely(!j_sums || !j_data)) {
     THROW(env, "java/lang/NullPointerException",
       "input ByteBuffers must not be null");
@@ -110,8 +135,8 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunk
   }
 
   // Convert direct byte buffers to C pointers
-  uint8_t *sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
-  uint8_t *data_addr = (*env)->GetDirectBufferAddress(env, j_data);
+  sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
+  data_addr = (*env)->GetDirectBufferAddress(env, j_data);
 
   if (unlikely(!sums_addr || !data_addr)) {
     THROW(env, "java/lang/IllegalArgumentException",
@@ -129,16 +154,15 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunk
     return;
   }
 
-  uint32_t *sums = (uint32_t *)(sums_addr + sums_offset);
-  uint8_t *data = data_addr + data_offset;
+  sums = (uint32_t *)(sums_addr + sums_offset);
+  data = data_addr + data_offset;
 
   // Convert to correct internal C constant for CRC type
-  int crc_type = convert_java_crc_type(env, j_crc_type);
+  crc_type = convert_java_crc_type(env, j_crc_type);
   if (crc_type == -1) return; // exception already thrown
 
   // Setup complete. Actually verify checksums.
-  crc32_error_t error_data;
-  int ret = bulk_verify_crc(data, data_len, sums, crc_type,
+  ret = bulk_verify_crc(data, data_len, sums, crc_type,
                             bytes_per_checksum, &error_data);
   if (likely(ret == CHECKSUMS_VALID)) {
     return;
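The format-string split above exists because the Microsoft C runtime of this era ships neither <inttypes.h> nor a conforming snprintf: 64-bit values are printed with %I64d through _snprintf_s, while the POSIX path keeps the C99 PRId64/PRId32 macros. Reduced to its essence (the function here is illustrative, not from the patch):

#include <stdio.h>
#include <stdint.h>
#ifndef _MSC_VER
#include <inttypes.h>
#endif

/* Print one 64-bit offset portably across the two compiler families. */
void format_offset_example(int64_t pos) {
  char message[1024];
#ifdef _MSC_VER
  _snprintf_s(message, sizeof(message), _TRUNCATE,
              "Checksum error at %I64d", pos);
#else
  snprintf(message, sizeof(message),
           "Checksum error at %"PRId64, pos);
#endif
  puts(message);
}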

+ 18 - 8
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c

@@ -21,25 +21,31 @@
  *   All rights reserved. Use of this source code is governed by a
  *   BSD-style license that can be found in the LICENSE file.
  */
+
+#include "org_apache_hadoop.h"
+
 #include <assert.h>
-#include <arpa/inet.h>
 #include <errno.h>
 #include <stdint.h>
+
+#ifdef UNIX
+#include <arpa/inet.h>
 #include <unistd.h>
+#endif // UNIX
 
 #include "crc32_zlib_polynomial_tables.h"
 #include "crc32c_tables.h"
 #include "bulk_crc32.h"
 #include "gcc_optimizations.h"
 
-#ifndef __FreeBSD__
+#if (!defined(__FreeBSD__) && !defined(WINDOWS))
 #define USE_PIPELINED
 #endif
 
 #define CRC_INITIAL_VAL 0xffffffff
 
 typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
-static inline uint32_t crc_val(uint32_t crc);
+static uint32_t crc_val(uint32_t crc);
 static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
 static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
 
@@ -187,7 +193,7 @@ return_crc_error:
 /**
  * Extract the final result of a CRC
  */
-static inline uint32_t crc_val(uint32_t crc) {
+uint32_t crc_val(uint32_t crc) {
   return ~crc;
 }
 
@@ -200,11 +206,13 @@ static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length) {
   uint32_t end_bytes = length - running_length; 
   int li;
   for (li=0; li < running_length/8; li++) {
+	uint32_t term1;
+	uint32_t term2;
     crc ^= *(uint32_t *)buf;
     buf += 4;
-    uint32_t term1 = CRC32C_T8_7[crc & 0x000000FF] ^
+    term1 = CRC32C_T8_7[crc & 0x000000FF] ^
         CRC32C_T8_6[(crc >> 8) & 0x000000FF];
-    uint32_t term2 = crc >> 16;
+    term2 = crc >> 16;
     crc = term1 ^
         CRC32C_T8_5[term2 & 0x000000FF] ^ 
         CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
@@ -234,11 +242,13 @@ static uint32_t crc32_zlib_sb8(
   uint32_t end_bytes = length - running_length; 
   int li;
   for (li=0; li < running_length/8; li++) {
+	uint32_t term1;
+	uint32_t term2;
     crc ^= *(uint32_t *)buf;
     buf += 4;
-    uint32_t term1 = CRC32_T8_7[crc & 0x000000FF] ^
+    term1 = CRC32_T8_7[crc & 0x000000FF] ^
         CRC32_T8_6[(crc >> 8) & 0x000000FF];
-    uint32_t term2 = crc >> 16;
+    term2 = crc >> 16;
     crc = term1 ^
         CRC32_T8_5[term2 & 0x000000FF] ^ 
         CRC32_T8_4[(term2 >> 8) & 0x000000FF];

+ 3 - 0
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h

@@ -19,7 +19,10 @@
 #define BULK_CRC32_H_INCLUDED
 
 #include <stdint.h>
+
+#ifdef UNIX
 #include <unistd.h> /* for size_t */
+#endif // UNIX
 
 // Constants for different CRC algorithms
 #define CRC32C_POLYNOMIAL 1

+ 92 - 11
hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h

@@ -17,19 +17,22 @@
  */
 
 /**
- * This file includes some common utilities 
+ * This file includes some common utilities
  * for all native code used in hadoop.
  */
- 
+
 #if !defined ORG_APACHE_HADOOP_H
 #define ORG_APACHE_HADOOP_H
 
-#include <dlfcn.h>
-#include <jni.h>
-
-#include "config.h"
+#if defined(_WIN32)
+#undef UNIX
+#define WINDOWS
+#else
+#undef WINDOWS
+#define UNIX
+#endif
 
-/* A helper macro to 'throw' a java exception. */ 
+/* A helper macro to 'throw' a java exception. */
 #define THROW(env, exception_name, message) \
   { \
 	jclass ecls = (*env)->FindClass(env, exception_name); \
@@ -55,13 +58,21 @@
     if ((*env)->ExceptionCheck(env)) return (ret); \
   }
 
-/** 
- * A helper function to dlsym a 'symbol' from a given library-handle. 
- * 
+/**
+ * Unix definitions
+ */
+#ifdef UNIX
+#include <config.h>
+#include <dlfcn.h>
+#include <jni.h>
+
+/**
+ * A helper function to dlsym a 'symbol' from a given library-handle.
+ *
  * @param env jni handle to report contingencies.
  * @param handle handle to the dlopen'ed library.
  * @param symbol symbol to load.
- * @return returns the address where the symbol is loaded in memory, 
+ * @return returns the address where the symbol is loaded in memory,
  *         <code>NULL</code> on error.
  */
 static __attribute__ ((unused))
@@ -84,6 +95,76 @@ void *do_dlsym(JNIEnv *env, void *handle, const char *symbol) {
   if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
     return; \
   }
+#endif
+// Unix part end
+
+
+/**
+ * Windows definitions
+ */
+#ifdef WINDOWS
+
+/* Force using Unicode throughout the code */
+#ifndef UNICODE
+#define UNICODE
+#endif
+
+/* Microsoft C Compiler does not support the C99 inline keyword */
+#ifndef __cplusplus
+#define inline __inline;
+#endif // _cplusplus
+
+/* Optimization macros supported by GCC but for which there is no
+   direct equivalent in the Microsoft C compiler */
+#define likely(_c) (_c)
+#define unlikely(_c) (_c)
+
+/* Disable certain warnings in the native CRC32 code. */
+#pragma warning(disable:4018)		// Signed/unsigned mismatch.
+#pragma warning(disable:4244)		// Possible loss of data in conversion.
+#pragma warning(disable:4267)		// Possible loss of data.
+#pragma warning(disable:4996)		// Use of deprecated function.
+
+#include <Windows.h>
+#include <stdio.h>
+#include <jni.h>
+
+#define snprintf(a, b ,c, d) _snprintf_s((a), (b), _TRUNCATE, (c), (d))
+
+/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
+#define LOAD_DYNAMIC_SYMBOL(func_type, func_ptr, env, handle, symbol) \
+  if ((func_ptr = (func_type) do_dlsym(env, handle, symbol)) == NULL) { \
+    return; \
+  }
+
+/**
+ * A helper function to dynamic load a 'symbol' from a given library-handle.
+ *
+ * @param env jni handle to report contingencies.
+ * @param handle handle to the dynamic library.
+ * @param symbol symbol to load.
+ * @return returns the address where the symbol is loaded in memory,
+ *         <code>NULL</code> on error.
+ */
+static FARPROC WINAPI do_dlsym(JNIEnv *env, HMODULE handle, LPCSTR symbol) {
+  DWORD dwErrorCode = ERROR_SUCCESS;
+  FARPROC func_ptr = NULL;
+
+  if (!env || !handle || !symbol) {
+    THROW(env, "java/lang/InternalError", NULL);
+    return NULL;
+  }
+
+  func_ptr = GetProcAddress(handle, symbol);
+  if (func_ptr == NULL)
+  {
+    THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
+  }
+  return func_ptr;
+}
+#endif
+// Windows part end
+
 
 #define LOCK_CLASS(env, clazz, classname) \
   if ((*env)->MonitorEnter(env, clazz) != 0) { \
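Both halves of this header now expose the same do_dlsym/LOAD_DYNAMIC_SYMBOL surface over dlopen/dlsym on Unix and LoadLibrary/GetProcAddress on Windows. A sketch of how a compressor binding might resolve a zlib entry point through it — the library names, the symbol, and the function-pointer type are assumptions for the example, not taken from this patch:

#include <jni.h>
#include "org_apache_hadoop.h"

#ifndef HADOOP_ZLIB_LIBRARY
#define HADOOP_ZLIB_LIBRARY L"zlib1.dll" /* as defined in the zlib header */
#endif

typedef int (*dlsym_inflateInit2_t)(void *, int, const char *, int);
static dlsym_inflateInit2_t dlsym_inflateInit2_;

void load_zlib_symbol_example(JNIEnv *env) {
#ifdef UNIX
  /* Unix path: do_dlsym wraps dlsym; the macro takes 4 arguments. */
  void *libz = dlopen("libz.so.1", RTLD_LAZY | RTLD_GLOBAL);
  if (!libz) {
    THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so.1");
    return;
  }
  LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
#endif

#ifdef WINDOWS
  /* Windows path: do_dlsym wraps GetProcAddress; the macro also takes
   * the function-pointer type so it can cast the FARPROC result. */
  HMODULE libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
  if (!libz) {
    THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
    return;
  }
  LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_t, dlsym_inflateInit2_,
                      env, libz, "inflateInit2_");
#endif
}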

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c

@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+#include "org_apache_hadoop.h"
+
 #include "bulk_crc32.h"
 
 #include <stdint.h>

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c

@@ -145,7 +145,7 @@ static BOOL IsPrefixedAlready(__in PCWSTR path)
 {
   static const PCWSTR LongPathPrefix = L"\\\\?\\";
   int Prefixlen = (int)wcslen(LongPathPrefix);
-  int i = 0;
+  size_t i = 0;
 
   if (path == NULL || wcslen(path) < Prefixlen)
   {
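The int-to-size_t change above is the usual fix for comparing a loop index against wcslen(), which returns size_t; under /W4 the signed/unsigned comparison draws warning C4018, one of the warnings this patch otherwise has to suppress. In miniature:

#include <wchar.h>
#include <stddef.h>

/* Matching the index type to the size_t length avoids the C4018
 * signed/unsigned mismatch and behaves correctly for any length. */
void scan_prefix_example(const wchar_t *path) {
  size_t i;
  for (i = 0; i < wcslen(path); ++i) {
    /* inspect path[i], as IsPrefixedAlready does with its prefix */
  }
}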

+ 5 - 1
hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj

@@ -70,12 +70,16 @@
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
     <LinkIncremental>true</LinkIncremental>
+    <OutDir />
+    <IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
     <LinkIncremental>false</LinkIncremental>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
     <LinkIncremental>false</LinkIncremental>
+    <OutDir>..\..\..\target\bin\</OutDir>
+    <IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
   </PropertyGroup>
   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
     <ClCompile>
@@ -146,4 +150,4 @@
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
   <ImportGroup Label="ExtensionTargets">
   </ImportGroup>
-</Project>
+</Project>

+ 39 - 39
hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln

@@ -1,39 +1,39 @@
-
-Microsoft Visual Studio Solution File, Format Version 11.00
-# Visual Studio 2010
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winutils", "winutils.vcxproj", "{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}"
-	ProjectSection(ProjectDependencies) = postProject
-		{12131AA7-902E-4A6D-9CE3-043261D22A12} = {12131AA7-902E-4A6D-9CE3-043261D22A12}
-	EndProjectSection
-EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libwinutils", "libwinutils.vcxproj", "{12131AA7-902E-4A6D-9CE3-043261D22A12}"
-EndProject
-Global
-	GlobalSection(SolutionConfigurationPlatforms) = preSolution
-		Debug|Win32 = Debug|Win32
-		Debug|x64 = Debug|x64
-		Release|Win32 = Release|Win32
-		Release|x64 = Release|x64
-	EndGlobalSection
-	GlobalSection(ProjectConfigurationPlatforms) = postSolution
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.ActiveCfg = Debug|x64
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.Build.0 = Debug|x64
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.ActiveCfg = Debug|x64
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.Build.0 = Debug|x64
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.ActiveCfg = Release|Win32
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.Build.0 = Release|Win32
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.ActiveCfg = Release|x64
-		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.Build.0 = Release|x64
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.ActiveCfg = Debug|x64
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.Build.0 = Debug|x64
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.ActiveCfg = Debug|x64
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.Build.0 = Debug|x64
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.ActiveCfg = Release|Win32
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.Build.0 = Release|Win32
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.ActiveCfg = Release|x64
-		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.Build.0 = Release|x64
-	EndGlobalSection
-	GlobalSection(SolutionProperties) = preSolution
-		HideSolutionNode = FALSE
-	EndGlobalSection
-EndGlobal
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winutils", "winutils.vcxproj", "{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}"
+	ProjectSection(ProjectDependencies) = postProject
+		{12131AA7-902E-4A6D-9CE3-043261D22A12} = {12131AA7-902E-4A6D-9CE3-043261D22A12}
+	EndProjectSection
+EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libwinutils", "libwinutils.vcxproj", "{12131AA7-902E-4A6D-9CE3-043261D22A12}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Win32 = Debug|Win32
+		Debug|x64 = Debug|x64
+		Release|Win32 = Release|Win32
+		Release|x64 = Release|x64
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.ActiveCfg = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.Build.0 = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.ActiveCfg = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.Build.0 = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.ActiveCfg = Release|Win32
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.Build.0 = Release|Win32
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.ActiveCfg = Release|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.Build.0 = Release|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.ActiveCfg = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.Build.0 = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.ActiveCfg = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.Build.0 = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.ActiveCfg = Release|Win32
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.Build.0 = Release|Win32
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.ActiveCfg = Release|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.Build.0 = Release|x64
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj

@@ -70,12 +70,16 @@
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
     <LinkIncremental>true</LinkIncremental>
+    <OutDir />
+    <IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
     <LinkIncremental>false</LinkIncremental>
   </PropertyGroup>
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
     <LinkIncremental>false</LinkIncremental>
+    <IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
+    <OutDir>..\..\..\target\bin\</OutDir>
   </PropertyGroup>
   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
     <ClCompile>

+ 33 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java

@@ -29,6 +29,8 @@ import org.apache.hadoop.util.Shell;
 
 import junit.framework.TestCase;
 
+import static org.junit.Assert.fail;
+
 public class TestPath extends TestCase {
   public void testToString() {
     toStringTest("/");
@@ -165,6 +167,37 @@ public class TestPath extends TestCase {
     assertEquals(new Path("foo/bar/baz","../../../../..").toString(), "../..");
   }
 
+  /** Test that Windows paths are correctly handled */
+  public void testWindowsPaths() throws URISyntaxException, IOException {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("/c:/foo/bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("file://c:/foo/bar").toString(), "file://c:/foo/bar");
+  }
+
+  /** Test invalid paths on Windows are correctly rejected */
+  public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    String [] invalidPaths = {
+        "hdfs:\\\\\\tmp"
+    };
+
+    for (String path : invalidPaths) {
+      try {
+        Path item = new Path(path);
+        fail("Did not throw for invalid path " + path);
+      } catch (IllegalArgumentException iae) {
+      }
+    }
+  }
+
   /** Test Path objects created from other Path objects */
   public void testChildParentResolution() throws URISyntaxException, IOException {
     Path parent = new Path("foo1://bar1/baz1");

+ 33 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java

@@ -19,8 +19,10 @@ package org.apache.hadoop.fs.shell;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
@@ -122,18 +124,46 @@ public class TestPathData {
     }
 
     // Can we handle raw Windows paths? The files need not exist for
-    // the tests to succeed.
+    // these tests to succeed.
     String[] winPaths = {
         "n:\\",
         "N:\\",
         "N:\\foo",
-        "N:\\foo\\bar"
+        "N:\\foo\\bar",
+        "N:/",
+        "N:/foo",
+        "N:/foo/bar"
     };
 
+    PathData item;
+
     for (String path : winPaths) {
-      PathData item = new PathData(path, conf);
+      item = new PathData(path, conf);
       assertEquals(new File(path), item.toFile());
     }
+
+    item = new PathData("foo\\bar", conf);
+    assertEquals(new File(testDir + "\\foo\\bar"), item.toFile());
+  }
+
+  @Test
+  public void testInvalidWindowsPath() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    // Verify that the following invalid paths are rejected.
+    String [] winPaths = {
+        "N:\\foo/bar"
+    };
+
+    for (String path : winPaths) {
+      try {
+        PathData item = new PathData(path, conf);
+        fail("Did not throw for invalid path " + path);
+      } catch (IOException ioe) {
+      }
+    }
   }
   }
 
   @Test
+ 167 - 29
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java

@@ -21,6 +21,8 @@ import java.io.File;
 import java.io.FileDescriptor;
 import java.io.FileDescriptor;
 import java.io.FileInputStream;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.ArrayList;
 import java.util.ArrayList;
@@ -59,9 +61,13 @@ public class TestNativeIO {
 
 
   @Test
   @Test
   public void testFstat() throws Exception {
   public void testFstat() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     FileOutputStream fos = new FileOutputStream(
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
       new File(TEST_DIR, "testfstat"));
-    NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+    NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
     fos.close();
     fos.close();
     LOG.info("Stat: " + String.valueOf(stat));
     LOG.info("Stat: " + String.valueOf(stat));
 
 
@@ -69,7 +75,8 @@ public class TestNativeIO {
     assertNotNull(stat.getGroup());
     assertNotNull(stat.getGroup());
     assertTrue(!stat.getGroup().isEmpty());
     assertTrue(!stat.getGroup().isEmpty());
     assertEquals("Stat mode field should indicate a regular file",
     assertEquals("Stat mode field should indicate a regular file",
-      NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+      NativeIO.POSIX.Stat.S_IFREG,
+      stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
   }
   }
 
 
   /**
   /**
@@ -80,6 +87,10 @@ public class TestNativeIO {
    */
    */
   @Test
   @Test
   public void testMultiThreadedFstat() throws Exception {
   public void testMultiThreadedFstat() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     final FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
 
@@ -93,12 +104,13 @@ public class TestNativeIO {
           long et = Time.now() + 5000;
           while (Time.now() < et) {
             try {
-              NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+              NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertNotNull(stat.getGroup());
               assertNotNull(stat.getGroup());
               assertTrue(!stat.getGroup().isEmpty());
               assertTrue(!stat.getGroup().isEmpty());
               assertEquals("Stat mode field should indicate a regular file",
               assertEquals("Stat mode field should indicate a regular file",
-                NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+                NativeIO.POSIX.Stat.S_IFREG,
+                stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
             } catch (Throwable t) {
               thrown.set(t);
             }
@@ -121,24 +133,121 @@ public class TestNativeIO {
 
   @Test
   public void testFstatClosedFd() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat2"));
     fos.close();
     try {
-      NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+      NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception", nioe);
       assertEquals(Errno.EBADF, nioe.getErrno());
     }
   }
 
+  @Test
+  public void testSetFilePointer() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    LOG.info("Set a file pointer on Windows");
+    try {
+      File testfile = new File(TEST_DIR, "testSetFilePointer");
+      assertTrue("Create test subject",
+          testfile.exists() || testfile.createNewFile());
+      FileWriter writer = new FileWriter(testfile);
+      try {
+        for (int i = 0; i < 200; i++)
+          if (i < 100)
+            writer.write('a');
+          else
+            writer.write('b');
+        writer.flush();
+      } catch (Exception writerException) {
+        fail("Got unexpected exception: " + writerException.getMessage());
+      } finally {
+        writer.close();
+      }
+
+      FileDescriptor fd = NativeIO.Windows.createFile(
+          testfile.getCanonicalPath(),
+          NativeIO.Windows.GENERIC_READ,
+          NativeIO.Windows.FILE_SHARE_READ |
+          NativeIO.Windows.FILE_SHARE_WRITE |
+          NativeIO.Windows.FILE_SHARE_DELETE,
+          NativeIO.Windows.OPEN_EXISTING);
+      NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);
+      FileReader reader = new FileReader(fd);
+      try {
+        int c = reader.read();
+        assertTrue("Unexpected character: " + c, c == 'b');
+      } catch (Exception readerException) {
+        fail("Got unexpected exception: " + readerException.getMessage());
+      } finally {
+        reader.close();
+      }
+    } catch (Exception e) {
+      fail("Got unexpected exception: " + e.getMessage());
+    }
+  }
+
+  @Test
+  public void testCreateFile() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
+    try {
+      File testfile = new File(TEST_DIR, "testCreateFile");
+      assertTrue("Create test subject",
+          testfile.exists() || testfile.createNewFile());
+
+      FileDescriptor fd = NativeIO.Windows.createFile(
+          testfile.getCanonicalPath(),
+          NativeIO.Windows.GENERIC_READ,
+          NativeIO.Windows.FILE_SHARE_READ |
+          NativeIO.Windows.FILE_SHARE_WRITE |
+          NativeIO.Windows.FILE_SHARE_DELETE,
+          NativeIO.Windows.OPEN_EXISTING);
+
+      FileInputStream fin = new FileInputStream(fd);
+      try {
+        fin.read();
+
+        File newfile = new File(TEST_DIR, "testRenamedFile");
+
+        boolean renamed = testfile.renameTo(newfile);
+        assertTrue("Rename failed.", renamed);
+
+        fin.read();
+      } catch (Exception e) {
+        fail("Got unexpected exception: " + e.getMessage());
+      }
+      finally {
+        fin.close();
+      }
+    } catch (Exception e) {
+      fail("Got unexpected exception: " + e.getMessage());
+    }
+
+  }
+
   @Test
   public void testOpenMissingWithoutCreate() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     LOG.info("Open a missing file without O_CREAT and it should fail");
     LOG.info("Open a missing file without O_CREAT and it should fail");
     try {
     try {
-      FileDescriptor fd = NativeIO.open(
+      FileDescriptor fd = NativeIO.POSIX.open(
         new File(TEST_DIR, "doesntexist").getAbsolutePath(),
         new File(TEST_DIR, "doesntexist").getAbsolutePath(),
-        NativeIO.O_WRONLY, 0700);
+        NativeIO.POSIX.O_WRONLY, 0700);
       fail("Able to open a new file without O_CREAT");
       fail("Able to open a new file without O_CREAT");
     } catch (NativeIOException nioe) {
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception", nioe);
       LOG.info("Got expected exception", nioe);
@@ -148,10 +257,14 @@ public class TestNativeIO {
 
   @Test
   public void testOpenWithCreate() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     LOG.info("Test creating a file with O_CREAT");
     LOG.info("Test creating a file with O_CREAT");
-    FileDescriptor fd = NativeIO.open(
+    FileDescriptor fd = NativeIO.POSIX.open(
       new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
       new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
-      NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+      NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
     assertNotNull(true);
     assertTrue(fd.valid());
     FileOutputStream fos = new FileOutputStream(fd);
@@ -162,9 +275,9 @@ public class TestNativeIO {
 
     LOG.info("Test exclusive create");
     try {
-      fd = NativeIO.open(
+      fd = NativeIO.POSIX.open(
         new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
         new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
-        NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
+        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT | NativeIO.POSIX.O_EXCL, 0700);
       fail("Was able to create existing file with O_EXCL");
       fail("Was able to create existing file with O_EXCL");
     } catch (NativeIOException nioe) {
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception for failed exclusive create", nioe);
       LOG.info("Got expected exception for failed exclusive create", nioe);
@@ -178,10 +291,14 @@ public class TestNativeIO {
    */
   @Test
   public void testFDDoesntLeak() throws IOException {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     for (int i = 0; i < 10000; i++) {
-      FileDescriptor fd = NativeIO.open(
+      FileDescriptor fd = NativeIO.POSIX.open(
         new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
         new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
-        NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
       assertNotNull(true);
       assertTrue(fd.valid());
       FileOutputStream fos = new FileOutputStream(fd);
@@ -195,8 +312,12 @@ public class TestNativeIO {
    */
   @Test
   public void testChmod() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     try {
-      NativeIO.chmod("/this/file/doesnt/exist", 777);
+      NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
       fail("Chmod of non-existent file didn't fail");
       fail("Chmod of non-existent file didn't fail");
     } catch (NativeIOException nioe) {
     } catch (NativeIOException nioe) {
       assertEquals(Errno.ENOENT, nioe.getErrno());
       assertEquals(Errno.ENOENT, nioe.getErrno());
@@ -205,21 +326,26 @@ public class TestNativeIO {
     File toChmod = new File(TEST_DIR, "testChmod");
     File toChmod = new File(TEST_DIR, "testChmod");
     assertTrue("Create test subject",
     assertTrue("Create test subject",
                toChmod.exists() || toChmod.mkdir());
                toChmod.exists() || toChmod.mkdir());
-    NativeIO.chmod(toChmod.getAbsolutePath(), 0777);
+    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777);
     assertPermissions(toChmod, 0777);
-    NativeIO.chmod(toChmod.getAbsolutePath(), 0000);
+    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000);
     assertPermissions(toChmod, 0000);
-    NativeIO.chmod(toChmod.getAbsolutePath(), 0644);
+    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0644);
     assertPermissions(toChmod, 0644);
   }
 
 
   @Test
   public void testPosixFadvise() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     FileInputStream fis = new FileInputStream("/dev/zero");
     FileInputStream fis = new FileInputStream("/dev/zero");
     try {
     try {
-      NativeIO.posix_fadvise(fis.getFD(), 0, 0,
-                             NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posix_fadvise(
+          fis.getFD(), 0, 0,
+          NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
     } catch (UnsupportedOperationException uoe) {
       // we should just skip the unit test on machines where we don't
       // have fadvise support
@@ -232,8 +358,9 @@ public class TestNativeIO {
     }
 
     try {
-      NativeIO.posix_fadvise(fis.getFD(), 0, 1024,
-                             NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posix_fadvise(
+          fis.getFD(), 0, 1024,
+          NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
 
       fail("Did not throw on bad file");
     } catch (NativeIOException nioe) {
@@ -241,8 +368,9 @@ public class TestNativeIO {
     }
     
     try {
-      NativeIO.posix_fadvise(null, 0, 1024,
-                             NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posix_fadvise(
+          null, 0, 1024,
+          NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
 
       fail("Did not throw on null file");
     } catch (NullPointerException npe) {
@@ -256,8 +384,9 @@ public class TestNativeIO {
       new File(TEST_DIR, "testSyncFileRange"));
       new File(TEST_DIR, "testSyncFileRange"));
     try {
     try {
       fos.write("foo".getBytes());
       fos.write("foo".getBytes());
-      NativeIO.sync_file_range(fos.getFD(), 0, 1024,
-                               NativeIO.SYNC_FILE_RANGE_WRITE);
+      NativeIO.POSIX.sync_file_range(
+          fos.getFD(), 0, 1024,
+          NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
       // no way to verify that this actually has synced,
       // but if it doesn't throw, we can assume it worked
     } catch (UnsupportedOperationException uoe) {
@@ -268,8 +397,9 @@ public class TestNativeIO {
       fos.close();
     }
     try {
-      NativeIO.sync_file_range(fos.getFD(), 0, 1024,
-                               NativeIO.SYNC_FILE_RANGE_WRITE);
+      NativeIO.POSIX.sync_file_range(
+          fos.getFD(), 0, 1024,
+          NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
       fail("Did not throw on bad file");
       fail("Did not throw on bad file");
     } catch (NativeIOException nioe) {
     } catch (NativeIOException nioe) {
       assertEquals(Errno.EBADF, nioe.getErrno());
       assertEquals(Errno.EBADF, nioe.getErrno());
@@ -285,12 +415,20 @@ public class TestNativeIO {
 
   @Test
   public void testGetUserName() throws IOException {
-    assertFalse(NativeIO.getUserName(0).isEmpty());
+    if (Path.WINDOWS) {
+      return;
+    }
+
+    assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
   @Test
   public void testGetGroupName() throws IOException {
-    assertFalse(NativeIO.getGroupName(0).isEmpty());
+    if (Path.WINDOWS) {
+      return;
+    }
+
+    assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
 
 }

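The two new Windows tests above exercise a pattern worth calling out: open the file with all three share flags, so other processes can read, write, or even rename and delete it while the handle is open, then position the handle explicitly before wrapping it in a stream. A condensed sketch using only the constants this patch introduces (path and offset are illustrative):

    FileDescriptor fd = NativeIO.Windows.createFile(
        "C:\\temp\\sample.txt",                // hypothetical, pre-existing file
        NativeIO.Windows.GENERIC_READ,
        NativeIO.Windows.FILE_SHARE_READ |
        NativeIO.Windows.FILE_SHARE_WRITE |
        NativeIO.Windows.FILE_SHARE_DELETE,
        NativeIO.Windows.OPEN_EXISTING);
    NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);

    FileInputStream in = new FileInputStream(fd);  // reads begin at byte 120
    try {
      int firstByte = in.read();
    } finally {
      in.close();
    }

Without FILE_SHARE_DELETE, the rename in testCreateFile would fail; that is the default behavior of java.io streams on Windows which this API works around.
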
+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java

@@ -602,13 +602,13 @@ class BlockReceiver implements Closeable {
           offsetInBlock > lastCacheDropOffset + CACHE_DROP_LAG_BYTES) {
         long twoWindowsAgo = lastCacheDropOffset - CACHE_DROP_LAG_BYTES;
         if (twoWindowsAgo > 0 && dropCacheBehindWrites) {
-          NativeIO.posixFadviseIfPossible(outFd, 0, lastCacheDropOffset,
-              NativeIO.POSIX_FADV_DONTNEED);
+          NativeIO.POSIX.posixFadviseIfPossible(outFd, 0, lastCacheDropOffset,
+              NativeIO.POSIX.POSIX_FADV_DONTNEED);
         }
         
         if (syncBehindWrites) {
-          NativeIO.syncFileRangeIfPossible(outFd, lastCacheDropOffset, CACHE_DROP_LAG_BYTES,
-              NativeIO.SYNC_FILE_RANGE_WRITE);
+          NativeIO.POSIX.syncFileRangeIfPossible(outFd, lastCacheDropOffset, CACHE_DROP_LAG_BYTES,
+              NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
         }
         
         lastCacheDropOffset += CACHE_DROP_LAG_BYTES;

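The BlockReceiver change itself is just requalification under NativeIO.POSIX, but the surrounding logic deserves a gloss: cache is dropped lazily, one lag window behind the write offset, so the most recently written pages stay resident for any immediate re-read. A stripped-down sketch of the windowing (field names follow the hunk; the syncBehindWrites branch is elided):

    if (dropCacheBehindWrites &&
        offsetInBlock > lastCacheDropOffset + CACHE_DROP_LAG_BYTES) {
      long twoWindowsAgo = lastCacheDropOffset - CACHE_DROP_LAG_BYTES;
      if (twoWindowsAgo > 0) {
        // Pages this far behind the writer are unlikely to be re-read soon;
        // advise the kernel that they can be evicted.
        NativeIO.POSIX.posixFadviseIfPossible(outFd, 0, lastCacheDropOffset,
            NativeIO.POSIX.POSIX_FADV_DONTNEED);
      }
      lastCacheDropOffset += CACHE_DROP_LAG_BYTES;
    }
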
+ 6 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java

@@ -319,9 +319,9 @@ class BlockSender implements java.io.Closeable {
     if (blockInFd != null && shouldDropCacheBehindRead && isLongRead()) {
       // drop the last few MB of the file from cache
       try {
-        NativeIO.posixFadviseIfPossible(
+        NativeIO.POSIX.posixFadviseIfPossible(
             blockInFd, lastCacheDropOffset, offset - lastCacheDropOffset,
-            NativeIO.POSIX_FADV_DONTNEED);
+            NativeIO.POSIX.POSIX_FADV_DONTNEED);
       } catch (Exception e) {
         LOG.warn("Unable to drop cache on file close", e);
       }
@@ -618,7 +618,8 @@ class BlockSender implements java.io.Closeable {
 
     if (isLongRead() && blockInFd != null) {
       // Advise that this file descriptor will be accessed sequentially.
-      NativeIO.posixFadviseIfPossible(blockInFd, 0, 0, NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posixFadviseIfPossible(
+          blockInFd, 0, 0, NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
     }
     
     // Trigger readahead of beginning of file if configured.
@@ -703,9 +704,9 @@ class BlockSender implements java.io.Closeable {
         offset >= nextCacheDropOffset) {
       long dropLength = offset - lastCacheDropOffset;
       if (dropLength >= 1024) {
-        NativeIO.posixFadviseIfPossible(blockInFd,
+        NativeIO.POSIX.posixFadviseIfPossible(blockInFd,
             lastCacheDropOffset, dropLength,
-            NativeIO.POSIX_FADV_DONTNEED);
+            NativeIO.POSIX.POSIX_FADV_DONTNEED);
       }
       lastCacheDropOffset += CACHE_DROP_INTERVAL_BYTES;
     }

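One non-obvious detail in the BlockSender hunks: posix_fadvise treats a length of zero as "from offset to end of file", so the (blockInFd, 0, 0, POSIX_FADV_SEQUENTIAL) call advises sequential access for the entire block file. The read side thus pairs two hints, sketched here with the field names from the hunks:

    // At open time, for long reads: the whole file will be scanned in order
    // (len == 0 means "to end of file" for posix_fadvise).
    NativeIO.POSIX.posixFadviseIfPossible(
        blockInFd, 0, 0, NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);

    // While streaming: drop pages that have already been sent to the client.
    long dropLength = offset - lastCacheDropOffset;
    if (dropLength >= 1024) {
      NativeIO.POSIX.posixFadviseIfPossible(blockInFd,
          lastCacheDropOffset, dropLength,
          NativeIO.POSIX.POSIX_FADV_DONTNEED);
    }
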
+ 4 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedChunkedFile.java

@@ -69,8 +69,10 @@ public class FadvisedChunkedFile extends ChunkedFile {
     }
     if (manageOsCache && getEndOffset() - getStartOffset() > 0) {
       try {
-        NativeIO.posixFadviseIfPossible(fd, getStartOffset(), getEndOffset()
-            - getStartOffset(), NativeIO.POSIX_FADV_DONTNEED);
+        NativeIO.POSIX.posixFadviseIfPossible(
+            fd,
+            getStartOffset(), getEndOffset() - getStartOffset(),
+            NativeIO.POSIX.POSIX_FADV_DONTNEED);
       } catch (Throwable t) {
         LOG.warn("Failed to manage OS cache for " + identifier, t);
       }

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java

@@ -71,8 +71,9 @@ public class FadvisedFileRegion extends DefaultFileRegion {
     }
     if (manageOsCache && getCount() > 0) {
       try {
-        NativeIO.posixFadviseIfPossible(fd, getPosition(), getCount(),
-            NativeIO.POSIX_FADV_DONTNEED);
+        NativeIO.POSIX.posixFadviseIfPossible(
+           fd, getPosition(), getCount(),
+           NativeIO.POSIX.POSIX_FADV_DONTNEED);
       } catch (Throwable t) {
         LOG.warn("Failed to manage OS cache for " + identifier, t);
       }

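FadvisedChunkedFile and FadvisedFileRegion wrap the advisory call identically: they catch Throwable rather than IOException, so anything from a native errno to an UnsatisfiedLinkError on a host without the native library degrades to a log warning instead of a failed map-output transfer. The guard, as a sketch (fd and identifier are fields of the enclosing classes):

    try {
      NativeIO.POSIX.posixFadviseIfPossible(
          fd, getPosition(), getCount(),
          NativeIO.POSIX.POSIX_FADV_DONTNEED);
    } catch (Throwable t) {
      // Best-effort only: dropping cache is an optimization and must never
      // fail the shuffle itself.
      LOG.warn("Failed to manage OS cache for " + identifier, t);
    }
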
+ 22 - 0
hadoop-project/pom.xml

@@ -880,6 +880,28 @@
         <build.platform>Mac_OS_X-${sun.arch.data.model}</build.platform>
       </properties>
     </profile>
+    <profile>
+      <id>native-win</id>
+      <activation>
+        <os>
+          <family>Windows</family>
+        </os>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <configuration>
+              <environmentVariables>
+                <!-- Specify where to look for the native DLL on Windows -->
+                <PATH>${env.PATH};${basedir}/../../hadoop-common-project/hadoop-common/target/bin</PATH>
+              </environmentVariables>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
     <profile>
       <id>test-patch</id>
       <activation>