
HDFS-10914. Move remnants of oah.hdfs.client to hadoop-hdfs-client.

(cherry picked from commit 92e5e9159850c01635091ea6ded0d8ee76691a9a)

 Conflicts:
	hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
Andrew Wang, 8 years ago
parent
commit
b12f004474

+ 0 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/CreateEncryptionZoneFlag.java → hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/CreateEncryptionZoneFlag.java

@@ -68,4 +68,3 @@ public enum CreateEncryptionZoneFlag {
     return mode;
   }
 }
-

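The CreateEncryptionZoneFlag enum moves to hadoop-hdfs-client unchanged apart from a trailing blank line. As a hedged sketch of how the relocated flag is typically consumed (this call is not part of the diff; the NameNode URI, path, and key name are assumptions for illustration), assuming the HdfsAdmin.createEncryptionZone overload that accepts an EnumSet of these flags:

import java.net.URI;
import java.util.EnumSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

public class EncryptionZoneExample {
  public static void main(String[] args) throws Exception {
    // Assumed NameNode address and KMS key name, purely for illustration.
    HdfsAdmin admin =
        new HdfsAdmin(URI.create("hdfs://namenode:8020"), new Configuration());
    // PROVISION_TRASH asks the NameNode to create a .Trash directory
    // inside the new encryption zone at creation time.
    EnumSet<CreateEncryptionZoneFlag> flags =
        EnumSet.of(CreateEncryptionZoneFlag.PROVISION_TRASH);
    admin.createEncryptionZone(new Path("/secure"), "myKey", flags);
  }
}
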
+ 24 - 25
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java → hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java

@@ -45,28 +45,27 @@ import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.hdfs.tools.DFSAdmin;
 
 /**
  * The public API for performing administrative functions on HDFS. Those writing
  * applications against HDFS should prefer this interface to directly accessing
  * functionality in DistributedFileSystem or DFSClient.
- * 
- * Note that this is distinct from the similarly-named {@link DFSAdmin}, which
+ *
+ * Note that this is distinct from the similarly-named DFSAdmin, which
  * is a class that provides the functionality for the CLI `hdfs dfsadmin ...'
  * commands.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class HdfsAdmin {
-  
+
   private DistributedFileSystem dfs;
   private static final FsPermission TRASH_PERMISSION = new FsPermission(
       FsAction.ALL, FsAction.ALL, FsAction.ALL, true);
-  
+
   /**
    * Create a new HdfsAdmin client.
-   * 
+   *
    * @param uri the unique URI of the HDFS file system to administer
    * @param conf configuration
    * @throws IOException in the event the file system could not be created
@@ -79,11 +78,11 @@ public class HdfsAdmin {
       dfs = (DistributedFileSystem)fs;
     }
   }
-  
+
   /**
    * Set the namespace quota (count of files, directories, and sym links) for a
    * directory.
-   * 
+   *
    * @param src the path to set the quota for
    * @param quota the value to set for the quota
    * @throws IOException in the event of error
@@ -91,22 +90,22 @@ public class HdfsAdmin {
   public void setQuota(Path src, long quota) throws IOException {
     dfs.setQuota(src, quota, HdfsConstants.QUOTA_DONT_SET);
   }
-  
+
   /**
    * Clear the namespace quota (count of files, directories and sym links) for a
    * directory.
-   * 
+   *
    * @param src the path to clear the quota of
    * @throws IOException in the event of error
    */
   public void clearQuota(Path src) throws IOException {
     dfs.setQuota(src, HdfsConstants.QUOTA_RESET, HdfsConstants.QUOTA_DONT_SET);
   }
-  
+
   /**
    * Set the storage space quota (size of files) for a directory. Note that
    * directories and sym links do not occupy storage space.
-   * 
+   *
    * @param src the path to set the space quota of
    * @param spaceQuota the value to set for the space quota
    * @throws IOException in the event of error
@@ -114,11 +113,11 @@ public class HdfsAdmin {
   public void setSpaceQuota(Path src, long spaceQuota) throws IOException {
     dfs.setQuota(src, HdfsConstants.QUOTA_DONT_SET, spaceQuota);
   }
-  
+
   /**
    * Clear the storage space quota (size of files) for a directory. Note that
    * directories and sym links do not occupy storage space.
-   * 
+   *
    * @param src the path to clear the space quota of
    * @throws IOException in the event of error
    */
@@ -151,7 +150,7 @@ public class HdfsAdmin {
   public void clearQuotaByStorageType(Path src, StorageType type) throws IOException {
     dfs.setQuotaByStorageType(src, type, HdfsConstants.QUOTA_RESET);
   }
-  
+
   /**
    * Allow snapshot on a directory.
    * @param path The path of the directory where snapshots will be taken.
@@ -159,7 +158,7 @@ public class HdfsAdmin {
   public void allowSnapshot(Path path) throws IOException {
     dfs.allowSnapshot(path);
   }
-  
+
   /**
    * Disallow snapshot on a directory.
    * @param path The path of the snapshottable directory.
@@ -170,7 +169,7 @@ public class HdfsAdmin {
 
   /**
    * Add a new CacheDirectiveInfo.
-   * 
+   *
    * @param info Information about a directive to add.
    * @param flags {@link CacheFlag}s to use for this operation.
    * @return the ID of the directive that was created.
@@ -180,10 +179,10 @@ public class HdfsAdmin {
       EnumSet<CacheFlag> flags) throws IOException {
   return dfs.addCacheDirective(info, flags);
   }
-  
+
   /**
    * Modify a CacheDirective.
-   * 
+   *
    * @param info Information about the directive to modify. You must set the ID
    *          to indicate which CacheDirective you want to modify.
    * @param flags {@link CacheFlag}s to use for this operation.
@@ -196,7 +195,7 @@ public class HdfsAdmin {
 
   /**
    * Remove a CacheDirective.
-   * 
+   *
    * @param id identifier of the CacheDirectiveInfo to remove
    * @throws IOException if the directive could not be removed
    */
@@ -207,7 +206,7 @@ public class HdfsAdmin {
 
   /**
    * List cache directives. Incrementally fetches results from the server.
-   * 
+   *
    * @param filter Filter parameters to use when listing the directives, null to
    *               list all directives visible to us.
    * @return A RemoteIterator which returns CacheDirectiveInfo objects.
@@ -222,7 +221,7 @@ public class HdfsAdmin {
    *
    * @param info
    *          The request to add a cache pool.
-   * @throws IOException 
+   * @throws IOException
    *          If the request could not be completed.
    */
   public void addCachePool(CachePoolInfo info) throws IOException {
@@ -234,19 +233,19 @@ public class HdfsAdmin {
    *
    * @param info
    *          The request to modify a cache pool.
-   * @throws IOException 
+   * @throws IOException
    *          If the request could not be completed.
    */
   public void modifyCachePool(CachePoolInfo info) throws IOException {
     dfs.modifyCachePool(info);
   }
-    
+
   /**
    * Remove a cache pool.
    *
    * @param poolName
    *          Name of the cache pool to remove.
-   * @throws IOException 
+   * @throws IOException
    *          if the cache pool did not exist, or could not be removed.
    */
   public void removeCachePool(String poolName) throws IOException {

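HdfsAdmin itself moves with only two substantive edits: the import of DFSAdmin (which lives in the server-side hadoop-hdfs module) is dropped and the {@link DFSAdmin} javadoc reference downgraded to plain text, and trailing whitespace is cleaned up. A minimal usage sketch of the relocated admin API; the NameNode URI, path, and quota values below are assumptions for illustration, not part of this commit:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

public class QuotaExample {
  public static void main(String[] args) throws Exception {
    HdfsAdmin admin =
        new HdfsAdmin(URI.create("hdfs://namenode:8020"), new Configuration());
    Path dir = new Path("/projects/reports");
    // Cap the directory at 10,000 namespace objects (files, dirs, symlinks)...
    admin.setQuota(dir, 10000L);
    // ...and at 1 TB of raw storage space.
    admin.setSpaceQuota(dir, 1024L * 1024 * 1024 * 1024);
  }
}
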
+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java → hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsUtils.java

@@ -20,8 +20,7 @@ package org.apache.hadoop.hdfs.client;
 import java.io.IOException;
 import java.net.URI;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,7 +29,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
-import org.apache.hadoop.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The public utility API for HDFS.
@@ -38,7 +38,7 @@ import org.apache.hadoop.io.IOUtils;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class HdfsUtils {
-  private static final Log LOG = LogFactory.getLog(HdfsUtils.class);
+  public static final Logger LOG = LoggerFactory.getLogger(HdfsUtils.class);
 
   /**
    * Is the HDFS healthy?
@@ -54,7 +54,7 @@ public class HdfsUtils {
       throw new IllegalArgumentException("The scheme is not "
           + HdfsConstants.HDFS_URI_SCHEME + ", uri=" + uri);
     }
-    
+
     final Configuration conf = new Configuration();
     //disable FileSystem cache
     conf.setBoolean(String.format("fs.%s.impl.disable.cache", scheme), true);
@@ -80,7 +80,7 @@ public class HdfsUtils {
       }
       return false;
     } finally {
-      IOUtils.cleanup(LOG, fs);
+      IOUtils.closeQuietly(fs);
     }
   }
 }

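HdfsUtils picks up two related changes on the way over: its logger moves from commons-logging to slf4j, and the close in the finally block switches from Hadoop's IOUtils.cleanup to commons-io's IOUtils.closeQuietly, presumably because cleanup takes a commons-logging Log that no longer exists here. A minimal sketch of calling the relocated utility, with the NameNode address assumed for illustration:

import java.net.URI;
import org.apache.hadoop.hdfs.client.HdfsUtils;

public class HealthCheck {
  public static void main(String[] args) {
    // Returns true only if the NameNode answers and is not in safe mode.
    boolean healthy = HdfsUtils.isHealthy(URI.create("hdfs://namenode:8020"));
    System.out.println("HDFS healthy: " + healthy);
  }
}
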
+ 0 - 0
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/package-info.java → hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/package-info.java