
HADOOP-11775. Fix Javadoc typos in hadoop-openstack module (Yanjun Wang via aw)

Allen Wittenauer, 10 years ago
parent commit ed0f4db0dd
15 changed files with 27 additions and 24 deletions
  1. hadoop-common-project/hadoop-common/CHANGES.txt (+3 -0)
  2. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/ExceptionDiags.java (+1 -1)
  3. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/HttpBodyContent.java (+1 -1)
  4. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftProtocolConstants.java (+2 -2)
  5. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java (+3 -3)
  6. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/package.html (+1 -1)
  7. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java (+2 -2)
  8. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java (+2 -2)
  9. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeOutputStream.java (+1 -1)
  10. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/DurationStats.java (+1 -1)
  11. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java (+3 -3)
  12. hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftUtils.java (+4 -4)
  13. hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftFileSystemBaseTest.java (+1 -1)
  14. hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemDirectories.java (+1 -1)
  15. hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java (+1 -1)

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -457,6 +457,9 @@ Trunk (Unreleased)
     HADOOP-11865. Incorrect path mentioned in document for accessing script
     files (J.Andreina via aw)
 
+    HADOOP-11775. Fix Javadoc typos in hadoop-openstack module (Yanjun Wang
+    via aw)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)

+ 1 - 1
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/ExceptionDiags.java

@@ -29,7 +29,7 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 
 /**
- * Variant of Hadoop Netutils exception wrapping with URI awareness and
+ * Variant of Hadoop NetUtils exception wrapping with URI awareness and
  * available in branch-1 too.
  */
 public class ExceptionDiags {

+ 1 - 1
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/HttpBodyContent.java

@@ -26,7 +26,7 @@ public class HttpBodyContent {
 
   /**
    * build a body response
-   * @param inputStream input stream from the operatin
+   * @param inputStream input stream from the operation
    * @param contentLength length of content; may be -1 for "don't know"
    */
   public HttpBodyContent(HttpInputStreamWithRelease inputStream,

+ 2 - 2
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftProtocolConstants.java

@@ -66,7 +66,7 @@ public class SwiftProtocolConstants {
    */
   public static final String SERVICE_CATALOG_SWIFT = "swift";
   /**
-   *  section in the JSON catalog provided after auth listing the cloudfiles;
+   *  section in the JSON catalog provided after auth listing the cloud files;
    * this is an alternate catalog entry name
    *  {@value}
    */
@@ -185,7 +185,7 @@ public class SwiftProtocolConstants {
     FS_SWIFT + ".requestsize";
 
   /**
-   * The default reqeuest size for reads: {@value}
+   * The default request size for reads: {@value}
    */
   public static final int DEFAULT_SWIFT_REQUEST_SIZE = 64;
 

+ 3 - 3
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java

@@ -83,7 +83,7 @@ import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.*;
  * This implements the client-side of the Swift REST API
  *
  * The core actions put, get and query data in the Swift object store,
- * after authenticationg the client.
+ * after authenticating the client.
  *
  * <b>Logging:</b>
  *
@@ -1572,7 +1572,7 @@ public final class SwiftRestClient {
    * This is public for unit testing
    *
    * @param path path to object
-   * @param endpointURI damain url e.g. http://domain.com
+   * @param endpointURI domain url e.g. http://domain.com
    * @return valid URI for object
    * @throws SwiftException
    */
@@ -1653,7 +1653,7 @@ public final class SwiftRestClient {
    * Execute a method in a new HttpClient instance.
    * If the auth failed, authenticate then retry the method.
    *
-   * @param method methot to exec
+   * @param method method to exec
    * @param <M> Method type
    * @return the status code
    * @throws IOException on any failure

+ 1 - 1
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/package.html

@@ -64,7 +64,7 @@ This has led to a different design pattern from S3, as instead of simple bucket
 the hostname of an S3 container is two-level, the name of the service provider
 being the second path: <code>swift://bucket.service/</code>
 
-The <code>service</code> portion of this domainame is used as a reference into
+The <code>service</code> portion of this domain name is used as a reference into
 the client settings -and so identify the service provider of that container.
 
 

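Aside on the naming scheme described in the package.html excerpt above: the service portion of a swift://container.service/ URI is what the client uses to look up provider-specific settings. A minimal, illustrative sketch (not part of this commit; "mycontainer" and "myservice" are hypothetical names, and the code assumes hadoop-openstack is on the classpath with that service configured) of resolving such a URI through Hadoop's generic FileSystem API:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SwiftUriExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // "mycontainer" is the container (bucket); "myservice" selects the
        // provider-specific client settings. Both names are placeholders.
        FileSystem fs =
            FileSystem.get(URI.create("swift://mycontainer.myservice/"), conf);
        // Simple probe of the container root via the generic FileSystem API.
        System.out.println(fs.exists(new Path("/")));
      }
    }
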
+ 2 - 2
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java

@@ -301,7 +301,7 @@ public class SwiftNativeFileSystem extends FileSystem {
    * creating directories until one that exists is found.
    *
    * This strategy means if a file is created in an existing directory,
-   * one quick poll sufficies.
+   * one quick poll suffices.
    *
    * There is a big assumption here: that all parent directories of an existing
    * directory also exists.
@@ -540,7 +540,7 @@ public class SwiftNativeFileSystem extends FileSystem {
    * Low-level operation to also set the block size for this operation
    * @param path       the file name to open
    * @param bufferSize the size of the buffer to be used.
-   * @param readBlockSize how big should the read blockk/buffer size be?
+   * @param readBlockSize how big should the read block/buffer size be?
    * @return the input stream
    * @throws FileNotFoundException if the file is not found
    * @throws IOException any IO problem

+ 2 - 2
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java

@@ -590,7 +590,7 @@ public class SwiftNativeFileSystemStore {
           //outcome #2 -move to subdir of dest
           destPath = toObjectPath(new Path(dst, src.getName()));
         } else {
-          //outcome #1 dest it's a file: fail if differeent
+          //outcome #1 dest it's a file: fail if different
           if (!renamingOnToSelf) {
             throw new FileAlreadyExistsException(
                     "cannot rename a file over one that already exists");
@@ -953,7 +953,7 @@ public class SwiftNativeFileSystemStore {
                                                         statuses, "; "));
     }
 
-    //delete the entries. including ourself.
+    //delete the entries. including ourselves.
     for (FileStatus entryStatus : statuses) {
       Path entryPath = entryStatus.getPath();
       try {

+ 1 - 1
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeOutputStream.java

@@ -261,7 +261,7 @@ class SwiftNativeOutputStream extends OutputStream {
     assert len >= 0  : "remainder to write is negative";
     SwiftUtils.debug(LOG," writeToBackupStream(offset=%d, len=%d)", offset, len);
     if (len == 0) {
-      //no remainder -downgrade to noop
+      //no remainder -downgrade to no-op
       return;
     }
 

+ 1 - 1
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/DurationStats.java

@@ -40,7 +40,7 @@ public class DurationStats {
   }
 
   /**
-   * construct from anothr stats entry;
+   * construct from another stats entry;
    * all value are copied.
    * @param that the source statistics
    */

+ 3 - 3
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java

@@ -165,7 +165,7 @@ public class SwiftTestUtils extends org.junit.Assert {
   }
 
   /**
-   * Read the file and convert to a byte dataaset
+   * Read the file and convert to a byte dataset
    * @param fs filesystem
    * @param path path to read from
    * @param len length of data to read
@@ -185,7 +185,7 @@ public class SwiftTestUtils extends org.junit.Assert {
   }
 
   /**
-   * Assert that tthe array src[0..len] and dest[] are equal
+   * Assert that the array src[0..len] and dest[] are equal
    * @param src source data
    * @param dest actual
    * @param len length of bytes to compare
@@ -522,7 +522,7 @@ public class SwiftTestUtils extends org.junit.Assert {
    * @param fs filesystem
    * @param dir directory to scan
    * @param subdir full path to look for
-   * @throws IOException IO probles
+   * @throws IOException IO problems
    */
   public static void assertListStatusFinds(FileSystem fs,
                                            Path dir,

+ 4 - 4
hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftUtils.java

@@ -82,7 +82,7 @@ public final class SwiftUtils {
   }
 
   /**
-   * Predicate: Is a swift object referring to the root direcory?
+   * Predicate: Is a swift object referring to the root directory?
    * @param swiftObject object to probe
    * @return true iff the object refers to the root
    */
@@ -147,7 +147,7 @@ public final class SwiftUtils {
    * @param fileSystem filesystem
    * @param path directory
    * @return a listing of the filestatuses of elements in the directory, one
-   * to a line, precedeed by the full path of the directory
+   * to a line, preceded by the full path of the directory
    * @throws IOException connectivity problems
    */
   public static String ls(FileSystem fileSystem, Path path) throws
@@ -169,7 +169,7 @@ public final class SwiftUtils {
   }
 
   /**
-   * Take an array of filestats and convert to a string (prefixed w/ a [01] counter
+   * Take an array of filestatus and convert to a string (prefixed w/ a [01] counter
    * @param stats array of stats
    * @param separator separator after every entry
    * @return a stringified set
@@ -184,7 +184,7 @@ public final class SwiftUtils {
 
   /**
    * Verify that the basic args to a read operation are valid;
-   * throws an exception if not -with meaningful text includeing
+   * throws an exception if not -with meaningful text including
    * @param buffer destination buffer
    * @param off offset
    * @param len number of bytes to read

+ 1 - 1
hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/SwiftFileSystemBaseTest.java

@@ -233,7 +233,7 @@ public class SwiftFileSystemBaseTest extends Assert implements
   /**
    * Get a string describing the outcome of a rename, by listing the dest
    * path and its parent along with some covering text
-   * @param src source patj
+   * @param src source path
    * @param dst dest path
    * @return a string for logs and exceptions
    * @throws IOException IO problems

+ 1 - 1
hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemDirectories.java

@@ -28,7 +28,7 @@ import java.io.FileNotFoundException;
 
 /**
  * Test swift-specific directory logic.
- * This class is HDFS-1 compatible; its designed to be subclases by something
+ * This class is HDFS-1 compatible; its designed to be subclasses by something
  * with HDFS2 extensions
  */
 public class TestSwiftFileSystemDirectories extends SwiftFileSystemBaseTest {

+ 1 - 1
hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java

@@ -131,7 +131,7 @@ public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest
     out.writeUTF("l");
     out.close();
     assertExists("lower case file", lower);
-    //verifEy the length of the upper file hasn't changed
+    //verify the length of the upper file hasn't changed
     assertExists("Original upper case file " + upper, upper);
     FileStatus newStatus = fs.getFileStatus(upper);
     assertEquals("Expected status:" + upperStatus